-rw-r--r--bootstrap/bin/start.bootbin5251 -> 5249 bytes
-rw-r--r--bootstrap/bin/start_clean.bootbin5251 -> 5249 bytes
-rw-r--r--bootstrap/lib/compiler/ebin/beam_asm.beambin11676 -> 11676 bytes
-rw-r--r--bootstrap/lib/compiler/ebin/beam_block.beambin15348 -> 15348 bytes
-rw-r--r--bootstrap/lib/compiler/ebin/beam_bool.beambin16340 -> 16308 bytes
-rw-r--r--bootstrap/lib/compiler/ebin/beam_clean.beambin11256 -> 9760 bytes
-rw-r--r--bootstrap/lib/compiler/ebin/beam_dead.beambin10212 -> 12136 bytes
-rw-r--r--bootstrap/lib/compiler/ebin/beam_disasm.beambin26320 -> 26320 bytes
-rw-r--r--bootstrap/lib/compiler/ebin/beam_except.beambin3564 -> 3564 bytes
-rw-r--r--bootstrap/lib/compiler/ebin/beam_flatten.beambin3148 -> 3020 bytes
-rw-r--r--bootstrap/lib/compiler/ebin/beam_jump.beambin8700 -> 9428 bytes
-rw-r--r--bootstrap/lib/compiler/ebin/beam_receive.beambin6480 -> 6484 bytes
-rw-r--r--bootstrap/lib/compiler/ebin/beam_split.beambin2444 -> 2416 bytes
-rw-r--r--bootstrap/lib/compiler/ebin/beam_type.beambin14488 -> 14808 bytes
-rw-r--r--bootstrap/lib/compiler/ebin/beam_utils.beambin13716 -> 13552 bytes
-rw-r--r--bootstrap/lib/compiler/ebin/beam_validator.beambin36256 -> 31648 bytes
-rw-r--r--bootstrap/lib/compiler/ebin/beam_z.beambin2748 -> 2380 bytes
-rw-r--r--bootstrap/lib/compiler/ebin/cerl.beambin32408 -> 32112 bytes
-rw-r--r--bootstrap/lib/compiler/ebin/cerl_inline.beambin39188 -> 39196 bytes
-rw-r--r--bootstrap/lib/compiler/ebin/compile.beambin39832 -> 39712 bytes
-rw-r--r--bootstrap/lib/compiler/ebin/compiler.app3
-rw-r--r--bootstrap/lib/compiler/ebin/compiler.appup2
-rw-r--r--bootstrap/lib/compiler/ebin/core_lib.beambin5980 -> 5240 bytes
-rw-r--r--bootstrap/lib/compiler/ebin/core_lint.beambin13664 -> 13656 bytes
-rw-r--r--bootstrap/lib/compiler/ebin/core_parse.beambin42968 -> 50352 bytes
-rw-r--r--bootstrap/lib/compiler/ebin/core_pp.beambin13196 -> 13252 bytes
-rw-r--r--bootstrap/lib/compiler/ebin/core_scan.beambin6672 -> 6692 bytes
-rw-r--r--bootstrap/lib/compiler/ebin/erl_bifs.beambin2180 -> 2188 bytes
-rw-r--r--bootstrap/lib/compiler/ebin/rec_env.beambin4772 -> 4772 bytes
-rw-r--r--bootstrap/lib/compiler/ebin/sys_core_dsetel.beambin7332 -> 7340 bytes
-rw-r--r--bootstrap/lib/compiler/ebin/sys_core_fold.beambin50268 -> 47012 bytes
-rw-r--r--bootstrap/lib/compiler/ebin/sys_core_fold_lists.beambin0 -> 4596 bytes
-rw-r--r--bootstrap/lib/compiler/ebin/sys_core_inline.beambin4272 -> 4276 bytes
-rw-r--r--bootstrap/lib/compiler/ebin/sys_pre_expand.beambin14972 -> 14980 bytes
-rw-r--r--bootstrap/lib/compiler/ebin/v3_core.beambin56264 -> 55012 bytes
-rw-r--r--bootstrap/lib/compiler/ebin/v3_kernel.beambin47584 -> 47576 bytes
-rw-r--r--bootstrap/lib/compiler/ebin/v3_kernel_pp.beambin12644 -> 12556 bytes
-rw-r--r--bootstrap/lib/kernel/ebin/application.beambin4572 -> 4564 bytes
-rw-r--r--bootstrap/lib/kernel/ebin/application_controller.beambin32180 -> 32120 bytes
-rw-r--r--bootstrap/lib/kernel/ebin/application_master.beambin6744 -> 6728 bytes
-rw-r--r--bootstrap/lib/kernel/ebin/code_server.beambin28572 -> 28580 bytes
-rw-r--r--bootstrap/lib/kernel/ebin/disk_log.beambin36596 -> 36488 bytes
-rw-r--r--bootstrap/lib/kernel/ebin/disk_log_1.beambin25048 -> 25016 bytes
-rw-r--r--bootstrap/lib/kernel/ebin/dist_ac.beambin26652 -> 26660 bytes
-rw-r--r--bootstrap/lib/kernel/ebin/erl_epmd.beambin7096 -> 7092 bytes
-rw-r--r--bootstrap/lib/kernel/ebin/file.beambin14500 -> 14568 bytes
-rw-r--r--bootstrap/lib/kernel/ebin/file_io_server.beambin14400 -> 14400 bytes
-rw-r--r--bootstrap/lib/kernel/ebin/global.beambin32688 -> 32688 bytes
-rw-r--r--bootstrap/lib/kernel/ebin/group.beambin13540 -> 14160 bytes
-rw-r--r--bootstrap/lib/kernel/ebin/hipe_unified_loader.beambin13516 -> 13472 bytes
-rw-r--r--bootstrap/lib/kernel/ebin/inet.beambin23716 -> 23068 bytes
-rw-r--r--bootstrap/lib/kernel/ebin/inet6_tcp.beambin2680 -> 2676 bytes
-rw-r--r--bootstrap/lib/kernel/ebin/inet6_tcp_dist.beambin6240 -> 6248 bytes
-rw-r--r--bootstrap/lib/kernel/ebin/inet_config.beambin7812 -> 7796 bytes
-rw-r--r--bootstrap/lib/kernel/ebin/inet_db.beambin26708 -> 26692 bytes
-rw-r--r--bootstrap/lib/kernel/ebin/inet_dns.beambin19768 -> 19812 bytes
-rw-r--r--bootstrap/lib/kernel/ebin/inet_parse.beambin12944 -> 12904 bytes
-rw-r--r--bootstrap/lib/kernel/ebin/inet_res.beambin15080 -> 15092 bytes
-rw-r--r--bootstrap/lib/kernel/ebin/inet_tcp.beambin2488 -> 2484 bytes
-rw-r--r--bootstrap/lib/kernel/ebin/inet_tcp_dist.beambin6524 -> 6532 bytes
-rw-r--r--bootstrap/lib/kernel/ebin/kernel.app2
-rw-r--r--bootstrap/lib/kernel/ebin/kernel.appup2
-rw-r--r--bootstrap/lib/kernel/ebin/kernel.beambin3852 -> 3852 bytes
-rw-r--r--bootstrap/lib/kernel/ebin/kernel_config.beambin2760 -> 2768 bytes
-rw-r--r--bootstrap/lib/kernel/ebin/net_kernel.beambin22752 -> 22760 bytes
-rw-r--r--bootstrap/lib/kernel/ebin/os.beambin5716 -> 5764 bytes
-rw-r--r--bootstrap/lib/kernel/ebin/pg2.beambin7900 -> 7852 bytes
-rw-r--r--bootstrap/lib/kernel/ebin/ram_file.beambin7032 -> 7044 bytes
-rw-r--r--bootstrap/lib/kernel/ebin/standard_error.beambin3668 -> 3864 bytes
-rw-r--r--bootstrap/lib/kernel/ebin/user.beambin11588 -> 11588 bytes
-rw-r--r--bootstrap/lib/kernel/ebin/user_drv.beambin10356 -> 11324 bytes
-rw-r--r--bootstrap/lib/kernel/ebin/wrap_log_reader.beambin3380 -> 3372 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/beam_lib.beambin18188 -> 18200 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/binary.beambin3644 -> 3812 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/c.beambin13956 -> 14096 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/calendar.beambin5176 -> 5172 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/dets.beambin53656 -> 53736 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/dets_server.beambin6956 -> 7028 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/dets_utils.beambin28880 -> 28784 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/dets_v8.beambin27616 -> 27592 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/dets_v9.beambin51024 -> 50172 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/dict.beambin9216 -> 9332 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/digraph_utils.beambin6852 -> 6848 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/edlin.beambin9956 -> 9956 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/edlin_expand.beambin3168 -> 3164 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/epp.beambin28540 -> 28544 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/erl_expand_records.beambin22692 -> 22716 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/erl_lint.beambin90728 -> 90616 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/erl_parse.beambin75936 -> 80572 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/erl_pp.beambin27184 -> 26628 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/erl_scan.beambin30044 -> 29696 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/erl_tar.beambin15664 -> 17248 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/error_logger_file_h.beambin5128 -> 5124 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/error_logger_tty_h.beambin5056 -> 5052 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/escript.beambin17412 -> 17404 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/ets.beambin22292 -> 22324 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/file_sorter.beambin30804 -> 30804 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/filelib.beambin8072 -> 8088 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/filename.beambin12512 -> 12536 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/gb_sets.beambin8284 -> 8284 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/gb_trees.beambin4992 -> 4988 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/gen.beambin4292 -> 4296 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/gen_event.beambin19252 -> 19260 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/gen_server.beambin18452 -> 19176 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/io.beambin6836 -> 6696 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/io_lib.beambin9856 -> 9768 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/io_lib_format.beambin12424 -> 12404 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/io_lib_fread.beambin7404 -> 7384 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/io_lib_pretty.beambin15196 -> 15256 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/lib.beambin9652 -> 9660 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/lists.beambin29712 -> 29708 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/math.beambin1136 -> 1176 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/ms_transform.beambin20384 -> 20600 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/otp_internal.beambin10112 -> 10360 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/proc_lib.beambin10460 -> 10460 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/qlc.beambin70184 -> 70160 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/qlc_pt.beambin73720 -> 73720 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/re.beambin13876 -> 13884 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/shell.beambin30252 -> 30248 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/sofs.beambin40820 -> 40816 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/stdlib.app2
-rw-r--r--bootstrap/lib/stdlib/ebin/stdlib.appup2
-rw-r--r--bootstrap/lib/stdlib/ebin/string.beambin4928 -> 5176 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/supervisor.beambin23484 -> 24176 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/unicode.beambin11608 -> 11616 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/win32reg.beambin5652 -> 5632 bytes
-rw-r--r--bootstrap/lib/stdlib/ebin/zip.beambin26848 -> 26844 bytes
-rw-r--r--erts/autoconf/vxworks/sed.general2
-rw-r--r--erts/configure.in38
-rw-r--r--erts/doc/src/notes.xml59
-rw-r--r--erts/emulator/Makefile.in20
-rw-r--r--erts/emulator/beam/benchmark.c65
-rw-r--r--erts/emulator/beam/benchmark.h39
-rw-r--r--erts/emulator/beam/bif.tab1
-rw-r--r--erts/emulator/beam/erl_driver.h5
-rw-r--r--erts/emulator/beam/erl_math.c18
-rw-r--r--erts/emulator/beam/global.h2
-rw-r--r--erts/emulator/beam/io.c27
-rw-r--r--erts/emulator/drivers/common/inet_drv.c104
-rw-r--r--erts/emulator/hipe/hipe_amd64.c18
-rw-r--r--erts/emulator/hipe/hipe_arch.h1
-rw-r--r--erts/emulator/hipe/hipe_arm.c2
-rw-r--r--erts/emulator/hipe/hipe_arm.h4
-rw-r--r--erts/emulator/hipe/hipe_arm_bifs.m473
-rw-r--r--erts/emulator/hipe/hipe_arm_glue.S41
-rw-r--r--erts/emulator/hipe/hipe_bif0.c22
-rw-r--r--erts/emulator/hipe/hipe_bif0.tab2
-rw-r--r--erts/emulator/hipe/hipe_bif1.c53
-rw-r--r--erts/emulator/hipe/hipe_perfctr.c229
-rw-r--r--erts/emulator/hipe/hipe_perfctr.h23
-rw-r--r--erts/emulator/hipe/hipe_perfctr.tab25
-rw-r--r--erts/emulator/hipe/hipe_ppc.c4
-rw-r--r--erts/emulator/hipe/hipe_ppc.h4
-rw-r--r--erts/emulator/hipe/hipe_sparc.c17
-rw-r--r--erts/emulator/hipe/hipe_sparc.h4
-rw-r--r--erts/emulator/hipe/hipe_x86.c17
-rw-r--r--erts/emulator/hipe/hipe_x86.h4
-rw-r--r--erts/emulator/sys/unix/erl_child_setup.c13
-rw-r--r--erts/emulator/sys/unix/sys.c64
-rw-r--r--erts/emulator/test/trace_bif_SUITE.erl4
-rw-r--r--lib/asn1/src/Makefile1
-rw-r--r--lib/asn1/src/asn1_db.erl8
-rw-r--r--lib/asn1/src/asn1_records.hrl16
-rw-r--r--lib/asn1/src/asn1ct.erl71
-rw-r--r--lib/asn1/src/asn1ct_check.erl5271
-rw-r--r--lib/asn1/src/asn1ct_constructed_ber_bin_v2.erl13
-rw-r--r--lib/asn1/src/asn1ct_constructed_per.erl155
-rw-r--r--lib/asn1/src/asn1ct_gen.erl75
-rw-r--r--lib/asn1/src/asn1ct_gen_ber_bin_v2.erl109
-rw-r--r--lib/asn1/src/asn1ct_imm.erl9
-rw-r--r--lib/asn1/src/asn1ct_parser.yrl1177
-rw-r--r--lib/asn1/src/asn1ct_parser2.erl2517
-rw-r--r--lib/asn1/src/asn1ct_tok.erl332
-rw-r--r--lib/asn1/test/Makefile10
-rw-r--r--lib/asn1/test/asn1_SUITE.erl120
-rw-r--r--lib/asn1/test/asn1_SUITE_data/BadTypeEnding.asn6
-rw-r--r--lib/asn1/test/asn1_SUITE_data/BadValueAssignment1.asn18
-rw-r--r--lib/asn1/test/asn1_SUITE_data/BadValueAssignment2.asn18
-rw-r--r--lib/asn1/test/asn1_SUITE_data/BadValueSet.asn19
-rw-r--r--lib/asn1/test/asn1_SUITE_data/CCSNARG3.asn2
-rw-r--r--lib/asn1/test/asn1_SUITE_data/ChoExtension.asn16
-rw-r--r--lib/asn1/test/asn1_SUITE_data/ChoiceBadExtension.asn127
-rw-r--r--lib/asn1/test/asn1_SUITE_data/ConstraintEquivalence.asn160
-rw-r--r--lib/asn1/test/asn1_SUITE_data/Constraints.py44
-rw-r--r--lib/asn1/test/asn1_SUITE_data/CoverParser.asn157
-rw-r--r--lib/asn1/test/asn1_SUITE_data/DoubleEllipses.asn19
-rw-r--r--lib/asn1/test/asn1_SUITE_data/EnumExt.asn12
-rw-r--r--lib/asn1/test/asn1_SUITE_data/Example.asn120
-rw-r--r--lib/asn1/test/asn1_SUITE_data/Export1.asn7
-rw-r--r--lib/asn1/test/asn1_SUITE_data/Exporting.asn118
-rw-r--r--lib/asn1/test/asn1_SUITE_data/ExtensibilityImplied.asn130
-rw-r--r--lib/asn1/test/asn1_SUITE_data/IllegalExport.asn17
-rw-r--r--lib/asn1/test/asn1_SUITE_data/Importing.asn120
-rw-r--r--lib/asn1/test/asn1_SUITE_data/InfObj.asn188
-rw-r--r--lib/asn1/test/asn1_SUITE_data/InfObjExtract.asn1136
-rw-r--r--lib/asn1/test/asn1_SUITE_data/MissingEnd.asn15
-rw-r--r--lib/asn1/test/asn1_SUITE_data/ObjIdValues.asn11
-rw-r--r--lib/asn1/test/asn1_SUITE_data/ParamBasic.asn133
-rw-r--r--lib/asn1/test/asn1_SUITE_data/Prim.asn12
-rw-r--r--lib/asn1/test/asn1_SUITE_data/SelectionType.asn8
-rw-r--r--lib/asn1/test/asn1_SUITE_data/Seq.py5
-rw-r--r--lib/asn1/test/asn1_SUITE_data/SeqOptional2.asn6
-rw-r--r--lib/asn1/test/asn1_SUITE_data/SequenceBadComma.asn10
-rw-r--r--lib/asn1/test/asn1_SUITE_data/SequenceBadComponentName.asn110
-rw-r--r--lib/asn1/test/asn1_SUITE_data/SequenceBadComponentType.asn110
-rw-r--r--lib/asn1/test/asn1_SUITE_data/Syntax.py10
-rw-r--r--lib/asn1/test/asn1_SUITE_data/ValueTest.asn96
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/ACSE-1.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/ACSE-1.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/AlgorithmInformation-2009.asn1466
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/AttributeCertificateVersion1-2009.asn159
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/AuthenticationFramework.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/AuthenticationFramework.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/BasicAccessControl.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/BasicAccessControl.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/CertificateExtensions.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/CertificateExtensions.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Character-Coding-Attributes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Character-Coding-Attributes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Character-Presentation-Attributes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Character-Presentation-Attributes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Character-Profile-Attributes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Character-Profile-Attributes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Colour-Attributes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Colour-Attributes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/CryptographicMessageSyntax-2009.asn1463
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/CryptographicMessageSyntaxAlgorithms-2009.asn1248
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/DOR-definition.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/DOR-definition.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/DSAOperationalAttributeTypes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/DSAOperationalAttributeTypes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Default-Value-Lists.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Default-Value-Lists.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryAbstractService.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/DirectoryAbstractService.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryAccessProtocol.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/DirectoryAccessProtocol.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryInformationShadowProtocol.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/DirectoryInformationShadowProtocol.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryOperationalBindingManagementProtocol.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/DirectoryOperationalBindingManagementProtocol.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryOperationalBindingTypes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/DirectoryOperationalBindingTypes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryProtectionMappings.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/DirectoryProtectionMappings.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryShadowAbstractService.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/DirectoryShadowAbstractService.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/DirectorySystemProtocol.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/DirectorySystemProtocol.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/DistributedOperations.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/DistributedOperations.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Document-Profile-Descriptor.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Document-Profile-Descriptor.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/EnhancedSecurity.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/EnhancedSecurity.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/EnrollmentMessageSyntax-2009.asn1543
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/External-References.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/External-References.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/GULSProtectionMappings.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/GULSProtectionMappings.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/GenericProtectingTransferSyntax.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/GenericProtectingTransferSyntax.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Geo-Gr-Coding-Attributes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Geo-Gr-Coding-Attributes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Geo-Gr-Presentation-Attributes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Geo-Gr-Presentation-Attributes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Geo-Gr-Profile-Attributes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Geo-Gr-Profile-Attributes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/GulsSecurityExchanges.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/GulsSecurityExchanges.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/GulsSecurityTransformations.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/GulsSecurityTransformations.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/HierarchicalOperationalBindings.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/HierarchicalOperationalBindings.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSAbstractService.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSAbstractService.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSAutoActionTypes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSAutoActionTypes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSExtendedBodyPartTypes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSExtendedBodyPartTypes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSExtendedBodyPartTypes2.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSExtendedBodyPartTypes2.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSExtendedVoiceBodyPartType.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSExtendedVoiceBodyPartType.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSFileTransferBodyPartType.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSFileTransferBodyPartType.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSForwardedContentBodyPartType.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSForwardedContentBodyPartType.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSForwardedReportBodyPartType.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSForwardedReportBodyPartType.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSFunctionalObjects.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSFunctionalObjects.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSHeadingExtensions.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSHeadingExtensions.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSInformationObjects.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSInformationObjects.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSMessageStoreAttributes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSMessageStoreAttributes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSObjectIdentifiers.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSObjectIdentifiers.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSObjectIdentifiers2.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSObjectIdentifiers2.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSSecurityExtensions.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSSecurityExtensions.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSUpperBounds.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSUpperBounds.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/ISO-STANDARD-9541-FONT-ATTRIBUTE-SET.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/ISO-STANDARD-9541-FONT-ATTRIBUTE-SET.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/ISO8571-FTAM.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/ISO8571-FTAM.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/ISO9541-SN.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/ISO9541-SN.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Identifiers-and-Expressions.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Identifiers-and-Expressions.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/InformationFramework.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/InformationFramework.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Interchange-Data-Elements.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Interchange-Data-Elements.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Layout-Descriptors.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Layout-Descriptors.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Link-Descriptors.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Link-Descriptors.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Location-Expressions.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Location-Expressions.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Logical-Descriptors.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Logical-Descriptors.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MHSObjectIdentifiers.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MHSObjectIdentifiers.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MHSProtocolObjectIdentifiers.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MHSProtocolObjectIdentifiers.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MSAbstractService.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MSAbstractService.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MSAccessProtocol.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MSAccessProtocol.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MSGeneralAttributeTypes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MSGeneralAttributeTypes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MSGeneralAutoActionTypes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MSGeneralAutoActionTypes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MSMatchingRules.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MSMatchingRules.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MSObjectIdentifiers.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MSObjectIdentifiers.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MSUpperBounds.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MSUpperBounds.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MTAAbstractService.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MTAAbstractService.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MTSAbstractService.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MTSAbstractService.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MTSAbstractService88.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MTSAbstractService88.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MTSAccessProtocol.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MTSAccessProtocol.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MTSObjectIdentifiers.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MTSObjectIdentifiers.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MTSUpperBounds.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MTSUpperBounds.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Notation.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Notation.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/OCSP-2009.asn1183
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/OLD-PKCS7.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/PKCS7.asn)2
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/ObjectIdentifiers.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/ObjectIdentifiers.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/OperationalBindingManagement.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/OperationalBindingManagement.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-10.asn156
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-12.asn1174
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-5.asn1202
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-7.asn1326
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-8.asn161
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-9.asn1391
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKCS7BodyPartType.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/PKCS7BodyPartType.asn)2
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKIX-CommonTypes-2009.asn1166
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKIX-X400Address-2009.asn1300
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1-PSS-OAEP-Algorithms-2009.asn1308
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1Explicit-2009.asn1415
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1Implicit-2009.asn1447
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKIXAlgs-2009.asn1528
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKIXAttributeCertificate-2009.asn1292
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKIXCMP-2009.asn1495
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKIXCRMF-2009.asn1409
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Protected-Part-Descriptors.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Protected-Part-Descriptors.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/ProtocolObjectIdentifiers.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/ProtocolObjectIdentifiers.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Raster-Gr-Coding-Attributes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Raster-Gr-Coding-Attributes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Raster-Gr-Presentation-Attributes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Raster-Gr-Presentation-Attributes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Raster-Gr-Profile-Attributes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Raster-Gr-Profile-Attributes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Reliable-Transfer-APDU.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Reliable-Transfer-APDU.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Abstract-Syntaxes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Abstract-Syntaxes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Generic-ROS-PDUs.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Generic-ROS-PDUs.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Information-Objects-extensions.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Information-Objects-extensions.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Information-Objects.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Information-Objects.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Realizations.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Realizations.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Useful-Definitions.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Useful-Definitions.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/SCVP-2009.asn1608
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/SecureMimeMessageV3dot1-2009.asn1122
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/SelectedAttributeTypes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/SelectedAttributeTypes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/SeseAPDUs.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/SeseAPDUs.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/SpkmGssTokens.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/SpkmGssTokens.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Style-Descriptors.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Style-Descriptors.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Subprofiles.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Subprofiles.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Temporal-Relationships.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Temporal-Relationships.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Text-Units.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Text-Units.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/UpperBounds.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/UpperBounds.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/UsefulDefinitions.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/UsefulDefinitions.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Videotex-Coding-Attributes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Videotex-Coding-Attributes.asn)0
-rw-r--r--lib/asn1/test/asn1_test_lib.erl25
-rw-r--r--lib/asn1/test/error_SUITE.erl725
-rw-r--r--lib/asn1/test/syntax_SUITE.erl340
-rw-r--r--lib/asn1/test/testChoExtension.erl5
-rw-r--r--lib/asn1/test/testConstraints.erl22
-rw-r--r--lib/asn1/test/testDoubleEllipses.erl6
-rw-r--r--lib/asn1/test/testEnumExt.erl28
-rw-r--r--lib/asn1/test/testExtensibilityImplied.erl29
-rw-r--r--lib/asn1/test/testImporting.erl34
-rw-r--r--lib/asn1/test/testInfObj.erl39
-rw-r--r--lib/asn1/test/testInfObjExtract.erl72
-rw-r--r--lib/asn1/test/testParamBasic.erl8
-rw-r--r--lib/asn1/test/testPrim.erl5
-rw-r--r--lib/asn1/test/testRfcs.erl75
-rw-r--r--lib/asn1/test/testSelectionTypes.erl28
-rw-r--r--lib/asn1/test/testUniqueObjectSets.erl175
-rw-r--r--lib/asn1/test/testValueTest.erl114
-rw-r--r--lib/asn1/test/testX420.erl93
-rw-r--r--lib/asn1/test/test_compile_options.erl39
-rw-r--r--lib/common_test/src/ct_cover.erl127
-rw-r--r--lib/common_test/src/ct_release_test.erl137
-rw-r--r--lib/common_test/src/ct_run.erl21
-rw-r--r--lib/common_test/test/ct_cover_SUITE.erl79
-rw-r--r--lib/common_test/test/ct_cover_SUITE_data/cover_SUITE.erl4
-rw-r--r--lib/compiler/src/Makefile2
-rw-r--r--lib/compiler/src/beam_block.erl2
-rw-r--r--lib/compiler/src/beam_bool.erl75
-rw-r--r--lib/compiler/src/beam_clean.erl25
-rw-r--r--lib/compiler/src/beam_dead.erl676
-rw-r--r--lib/compiler/src/beam_flatten.erl4
-rw-r--r--lib/compiler/src/beam_jump.erl63
-rw-r--r--lib/compiler/src/beam_split.erl5
-rw-r--r--lib/compiler/src/beam_type.erl1
-rw-r--r--lib/compiler/src/beam_utils.erl26
-rw-r--r--lib/compiler/src/beam_validator.erl305
-rw-r--r--lib/compiler/src/beam_z.erl14
-rw-r--r--lib/compiler/src/cerl.erl59
-rw-r--r--lib/compiler/src/cerl_clauses.erl46
-rw-r--r--lib/compiler/src/compiler.app.src1
-rw-r--r--lib/compiler/src/core_lib.erl64
-rw-r--r--lib/compiler/src/core_lint.erl2
-rw-r--r--lib/compiler/src/core_parse.hrl3
-rw-r--r--lib/compiler/src/core_parse.yrl98
-rw-r--r--lib/compiler/src/core_pp.erl22
-rw-r--r--lib/compiler/src/core_scan.erl6
-rw-r--r--lib/compiler/src/erl_bifs.erl1
-rw-r--r--lib/compiler/src/sys_core_fold.erl1094
-rw-r--r--lib/compiler/src/sys_core_fold_lists.erl386
-rw-r--r--lib/compiler/src/sys_core_inline.erl8
-rw-r--r--lib/compiler/src/v3_core.erl227
-rw-r--r--lib/compiler/src/v3_kernel.erl122
-rw-r--r--lib/compiler/test/Makefile3
-rw-r--r--lib/compiler/test/andor_SUITE.erl10
-rw-r--r--lib/compiler/test/beam_utils_SUITE.erl236
-rw-r--r--lib/compiler/test/beam_validator_SUITE.erl194
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/bad_dsetel.S6
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/bin_aligned.S2
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/bin_match.S64
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/compiler_bug.S38
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/dead_code.S25
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/erl_prim_loader.beambin17460 -> 0 bytes
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/freg_range.S4
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/freg_state.S2
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/freg_uninit.S14
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/illegal_instruction.S26
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/map_field_lists.S29
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/merge_undefined.S3
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/no_exception_in_catch.S4
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/stack.S4
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/undef_label.S22
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/uninit.S16
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/xrange.S4
-rw-r--r--lib/compiler/test/bs_bit_binaries_SUITE.erl2
-rw-r--r--lib/compiler/test/bs_construct_SUITE.erl2
-rw-r--r--lib/compiler/test/bs_match_SUITE.erl52
-rw-r--r--lib/compiler/test/compilation_SUITE.erl6
-rw-r--r--lib/compiler/test/compile_SUITE.erl89
-rw-r--r--lib/compiler/test/compile_SUITE_data/dialyzer_test.erl39
-rw-r--r--lib/compiler/test/core_SUITE_data/map_core_test.core12
-rw-r--r--lib/compiler/test/core_fold_SUITE.erl73
-rw-r--r--lib/compiler/test/float_SUITE.erl2
-rw-r--r--lib/compiler/test/guard_SUITE.erl263
-rw-r--r--lib/compiler/test/lc_SUITE.erl102
-rw-r--r--lib/compiler/test/map_SUITE.erl11
-rw-r--r--lib/compiler/test/match_SUITE.erl38
-rw-r--r--lib/compiler/test/misc_SUITE.erl17
-rw-r--r--lib/compiler/test/receive_SUITE.erl38
-rw-r--r--lib/compiler/test/record_SUITE.erl8
-rw-r--r--lib/compiler/test/test_lib.erl23
-rw-r--r--lib/compiler/test/trycatch_SUITE.erl20
-rw-r--r--lib/compiler/test/warnings_SUITE.erl53
-rw-r--r--lib/crypto/c_src/crypto.c81
-rw-r--r--lib/crypto/doc/src/crypto.xml36
-rw-r--r--lib/crypto/src/crypto.erl37
-rw-r--r--lib/crypto/test/crypto_SUITE.erl145
-rw-r--r--lib/debugger/src/dbg_wx_settings.erl10
-rw-r--r--lib/dialyzer/doc/src/dialyzer.xml73
-rw-r--r--lib/dialyzer/src/dialyzer.erl8
-rw-r--r--lib/dialyzer/src/dialyzer.hrl12
-rw-r--r--lib/dialyzer/src/dialyzer_analysis_callgraph.erl156
-rw-r--r--lib/dialyzer/src/dialyzer_behaviours.erl18
-rw-r--r--lib/dialyzer/src/dialyzer_cl.erl21
-rw-r--r--lib/dialyzer/src/dialyzer_codeserver.erl32
-rw-r--r--lib/dialyzer/src/dialyzer_contracts.erl45
-rw-r--r--lib/dialyzer/src/dialyzer_dataflow.erl149
-rw-r--r--lib/dialyzer/src/dialyzer_options.erl4
-rw-r--r--lib/dialyzer/src/dialyzer_races.erl17
-rw-r--r--lib/dialyzer/src/dialyzer_succ_typings.erl46
-rw-r--r--lib/dialyzer/src/dialyzer_typesig.erl2
-rw-r--r--lib/dialyzer/src/dialyzer_utils.erl197
-rw-r--r--lib/dialyzer/test/race_SUITE_data/src/ets_insert_args1_suppressed.erl19
-rw-r--r--lib/dialyzer/test/small_SUITE_data/results/blame_contract_range_suppressed2
-rw-r--r--lib/dialyzer/test/small_SUITE_data/results/request12
-rw-r--r--lib/dialyzer/test/small_SUITE_data/results/suppress_request6
-rw-r--r--lib/dialyzer/test/small_SUITE_data/src/blame_contract_range_suppressed.erl15
-rw-r--r--lib/dialyzer/test/small_SUITE_data/src/maps_redef2.erl23
-rw-r--r--lib/dialyzer/test/small_SUITE_data/src/request1.erl12
-rw-r--r--lib/dialyzer/test/small_SUITE_data/src/suppress_request.erl50
-rw-r--r--lib/dialyzer/test/small_SUITE_data/src/suppression1.erl33
-rw-r--r--lib/dialyzer/test/small_SUITE_data/src/suppression2.erl32
-rw-r--r--lib/edoc/doc/overview.edoc23
-rw-r--r--lib/edoc/include/edoc_doclet.hrl11
-rw-r--r--lib/edoc/priv/edoc.dtd9
-rw-r--r--lib/edoc/priv/stylesheet.css4
-rw-r--r--lib/edoc/src/edoc.erl174
-rw-r--r--lib/edoc/src/edoc.hrl7
-rw-r--r--lib/edoc/src/edoc_data.erl33
-rw-r--r--lib/edoc/src/edoc_doclet.erl106
-rw-r--r--lib/edoc/src/edoc_extract.erl19
-rw-r--r--lib/edoc/src/edoc_layout.erl26
-rw-r--r--lib/edoc/src/edoc_lib.erl157
-rw-r--r--lib/edoc/src/edoc_macros.erl4
-rw-r--r--lib/edoc/src/edoc_parser.yrl10
-rw-r--r--lib/edoc/src/edoc_refs.erl72
-rw-r--r--lib/edoc/src/edoc_run.erl30
-rw-r--r--lib/edoc/src/edoc_tags.erl22
-rw-r--r--lib/edoc/src/otpsgml_layout.erl24
-rw-r--r--lib/edoc/test/edoc_SUITE.erl21
-rw-r--r--lib/edoc/test/edoc_SUITE_data/myapp/doc/.dummy0
-rw-r--r--lib/edoc/test/edoc_SUITE_data/myapp/src/a.erl1
-rw-r--r--lib/edoc/test/edoc_SUITE_data/myapp/src/src_1/b.erl1
-rw-r--r--lib/eldap/src/eldap.erl2
-rw-r--r--lib/erl_docgen/priv/bin/specs_gen.escript2
-rwxr-xr-xlib/erl_docgen/priv/bin/xml_from_edoc.escript2
-rw-r--r--lib/hipe/cerl/erl_bif_types.erl10
-rw-r--r--lib/hipe/ppc/hipe_rtl_to_ppc.erl14
-rw-r--r--lib/hipe/rtl/hipe_rtl.erl18
-rw-r--r--lib/hipe/sparc/hipe_rtl_to_sparc.erl10
-rw-r--r--lib/hipe/x86/hipe_rtl_to_x86.erl15
-rw-r--r--lib/inets/doc/src/http_uri.xml11
-rw-r--r--lib/inets/doc/src/httpd.xml13
-rw-r--r--lib/inets/doc/src/notes.xml35
-rw-r--r--lib/inets/examples/httpd_load_test/hdlt_slave.erl2
-rw-r--r--lib/inets/src/http_client/httpc_cookie.erl20
-rw-r--r--lib/inets/src/http_lib/http_internal.hrl3
-rw-r--r--lib/inets/src/http_lib/http_request.erl26
-rw-r--r--lib/inets/src/http_lib/http_uri.erl35
-rw-r--r--lib/inets/src/http_server/httpd_conf.erl24
-rw-r--r--lib/inets/src/http_server/httpd_request.erl102
-rw-r--r--lib/inets/src/http_server/httpd_request_handler.erl32
-rw-r--r--lib/inets/src/http_server/mod_alias.erl14
-rw-r--r--lib/inets/test/http_format_SUITE.erl16
-rw-r--r--lib/inets/test/httpc_SUITE.erl52
-rw-r--r--lib/inets/test/httpd_SUITE.erl45
-rw-r--r--lib/inets/test/uri_SUITE.erl37
-rw-r--r--lib/inets/vsn.mk2
-rw-r--r--lib/kernel/doc/src/gen_sctp.xml2
-rw-r--r--lib/kernel/doc/src/inet.xml10
-rw-r--r--lib/kernel/src/gen_udp.erl2
-rw-r--r--lib/kernel/src/hipe_unified_loader.erl17
-rw-r--r--lib/kernel/src/inet.erl4
-rw-r--r--lib/kernel/src/standard_error.erl155
-rw-r--r--lib/kernel/test/Makefile3
-rw-r--r--lib/kernel/test/inet_SUITE.erl19
-rw-r--r--lib/kernel/test/standard_error_SUITE.erl38
-rw-r--r--lib/mnesia/doc/src/Mnesia_chap5.xmlsrc1
-rw-r--r--lib/mnesia/doc/src/mnesia.xml22
-rw-r--r--lib/mnesia/src/mnesia_controller.erl29
-rw-r--r--lib/mnesia/src/mnesia_dumper.erl60
-rw-r--r--lib/mnesia/src/mnesia_loader.erl7
-rw-r--r--lib/mnesia/src/mnesia_monitor.erl4
-rw-r--r--lib/mnesia/src/mnesia_recover.erl31
-rw-r--r--lib/observer/doc/src/observer_ug.xml23
-rw-r--r--lib/observer/src/observer_html_lib.erl8
-rw-r--r--lib/observer/src/observer_procinfo.erl80
-rw-r--r--lib/observer/src/observer_wx.erl125
-rw-r--r--lib/observer/test/observer_SUITE.erl35
-rw-r--r--lib/os_mon/c_src/memsup.c2
-rw-r--r--lib/parsetools/include/leexinc.hrl8
-rw-r--r--lib/parsetools/src/yecc.erl18
-rw-r--r--lib/parsetools/test/yecc_SUITE.erl18
-rw-r--r--lib/sasl/test/release_handler_SUITE.erl18
-rw-r--r--lib/sasl/test/release_handler_SUITE_data/regexp_appup/app1/ebin/app1.app29
-rw-r--r--lib/sasl/test/release_handler_SUITE_data/regexp_appup/app1/ebin/app1.appup23
-rw-r--r--lib/ssh/doc/src/ssh.xml4
-rw-r--r--lib/ssh/doc/src/using_ssh.xml2
-rw-r--r--lib/ssh/src/ssh_connection_handler.erl2
-rw-r--r--lib/ssh/src/ssh_sftpd.erl122
-rw-r--r--lib/ssh/test/ssh_basic_SUITE.erl7
-rw-r--r--lib/ssh/test/ssh_sftpd_SUITE.erl48
-rw-r--r--lib/ssl/doc/src/ssl.xml74
-rw-r--r--lib/ssl/doc/src/ssl_app.xml13
-rw-r--r--lib/ssl/src/dtls_record.erl4
-rw-r--r--lib/ssl/src/ssl.appup.src8
-rw-r--r--lib/ssl/src/ssl.erl25
-rw-r--r--lib/ssl/src/ssl_certificate.erl2
-rw-r--r--lib/ssl/src/ssl_cipher.erl72
-rw-r--r--lib/ssl/src/ssl_internal.hrl12
-rw-r--r--lib/ssl/src/ssl_manager.erl95
-rw-r--r--lib/ssl/src/ssl_pkix_db.erl33
-rw-r--r--lib/ssl/src/ssl_record.erl10
-rw-r--r--lib/ssl/src/ssl_v3.erl5
-rw-r--r--lib/ssl/src/tls_connection.erl7
-rw-r--r--lib/ssl/src/tls_record.erl25
-rw-r--r--lib/ssl/src/tls_v1.erl10
-rw-r--r--lib/ssl/test/Makefile4
-rw-r--r--lib/ssl/test/ssl_basic_SUITE.erl81
-rw-r--r--lib/ssl/test/ssl_certificate_verify_SUITE.erl4
-rw-r--r--lib/ssl/test/ssl_cipher_SUITE.erl189
-rw-r--r--lib/ssl/test/ssl_pem_cache_SUITE.erl127
-rw-r--r--lib/ssl/test/ssl_test_lib.erl10
-rw-r--r--lib/ssl/test/ssl_upgrade_SUITE.erl162
-rw-r--r--lib/ssl/vsn.mk2
-rw-r--r--lib/stdlib/doc/src/math.xml1
-rw-r--r--lib/stdlib/src/c.erl17
-rw-r--r--lib/stdlib/src/dict.erl6
-rw-r--r--lib/stdlib/src/erl_lint.erl4
-rw-r--r--lib/stdlib/src/math.erl7
-rw-r--r--lib/stdlib/src/otp_internal.erl13
-rw-r--r--lib/stdlib/src/string.erl46
-rw-r--r--lib/stdlib/test/stdlib_SUITE.erl15
-rw-r--r--lib/stdlib/test/string_SUITE.erl40
-rw-r--r--lib/syntax_tools/src/epp_dodger.erl6
-rw-r--r--lib/syntax_tools/src/erl_prettypr.erl33
-rw-r--r--lib/syntax_tools/src/erl_syntax.erl208
-rw-r--r--lib/syntax_tools/src/erl_syntax_lib.erl106
-rw-r--r--lib/syntax_tools/src/erl_tidy.erl30
-rw-r--r--lib/syntax_tools/src/igor.erl23
-rw-r--r--lib/test_server/src/erl2html2.erl20
-rw-r--r--lib/test_server/src/test_server.erl142
-rw-r--r--lib/test_server/src/test_server_ctrl.erl9
-rw-r--r--lib/test_server/test/erl2html2_SUITE.erl2
-rw-r--r--lib/tools/doc/src/cover.xml109
-rw-r--r--lib/tools/src/cover.erl825
-rw-r--r--lib/tools/src/lcnt.erl70
-rw-r--r--lib/tools/test/cover_SUITE.erl296
-rw-r--r--lib/wx/src/wxe_server.erl14
-rw-r--r--lib/wx/test/wx_event_SUITE.erl8
-rw-r--r--otp_versions.table1
-rw-r--r--system/doc/getting_started/conc_prog.xml20
-rw-r--r--system/doc/getting_started/records_macros.xml4
-rw-r--r--system/doc/getting_started/robustness.xml8
-rw-r--r--system/doc/getting_started/seq_prog.xml43
-rw-r--r--system/doc/programming_examples/fun_test.erl8
-rw-r--r--system/doc/reference_manual/modules.xml3
-rw-r--r--system/doc/reference_manual/typespec.xml8
-rw-r--r--system/doc/system_principles/system_principles.xml2
-rw-r--r--xcomp/erl-xcomp-powerpc-dso-linux-gnu.conf28
587 files changed, 20980 insertions, 11884 deletions
diff --git a/bootstrap/bin/start.boot b/bootstrap/bin/start.boot
index 7e93eb7f7a..f8a7250187 100644
--- a/bootstrap/bin/start.boot
+++ b/bootstrap/bin/start.boot
Binary files differ
diff --git a/bootstrap/bin/start_clean.boot b/bootstrap/bin/start_clean.boot
index 7e93eb7f7a..f8a7250187 100644
--- a/bootstrap/bin/start_clean.boot
+++ b/bootstrap/bin/start_clean.boot
Binary files differ
diff --git a/bootstrap/lib/compiler/ebin/beam_asm.beam b/bootstrap/lib/compiler/ebin/beam_asm.beam
index 0758874641..b217690700 100644
--- a/bootstrap/lib/compiler/ebin/beam_asm.beam
+++ b/bootstrap/lib/compiler/ebin/beam_asm.beam
Binary files differ
diff --git a/bootstrap/lib/compiler/ebin/beam_block.beam b/bootstrap/lib/compiler/ebin/beam_block.beam
index 0eca719d0d..8ecfe4424e 100644
--- a/bootstrap/lib/compiler/ebin/beam_block.beam
+++ b/bootstrap/lib/compiler/ebin/beam_block.beam
Binary files differ
diff --git a/bootstrap/lib/compiler/ebin/beam_bool.beam b/bootstrap/lib/compiler/ebin/beam_bool.beam
index f339e76fce..6454719448 100644
--- a/bootstrap/lib/compiler/ebin/beam_bool.beam
+++ b/bootstrap/lib/compiler/ebin/beam_bool.beam
Binary files differ
diff --git a/bootstrap/lib/compiler/ebin/beam_clean.beam b/bootstrap/lib/compiler/ebin/beam_clean.beam
index f6a9ff987d..086720f13c 100644
--- a/bootstrap/lib/compiler/ebin/beam_clean.beam
+++ b/bootstrap/lib/compiler/ebin/beam_clean.beam
Binary files differ
diff --git a/bootstrap/lib/compiler/ebin/beam_dead.beam b/bootstrap/lib/compiler/ebin/beam_dead.beam
index dc924a2905..1e6528b92c 100644
--- a/bootstrap/lib/compiler/ebin/beam_dead.beam
+++ b/bootstrap/lib/compiler/ebin/beam_dead.beam
Binary files differ
diff --git a/bootstrap/lib/compiler/ebin/beam_disasm.beam b/bootstrap/lib/compiler/ebin/beam_disasm.beam
index f050f39ca0..b63864c96c 100644
--- a/bootstrap/lib/compiler/ebin/beam_disasm.beam
+++ b/bootstrap/lib/compiler/ebin/beam_disasm.beam
Binary files differ
diff --git a/bootstrap/lib/compiler/ebin/beam_except.beam b/bootstrap/lib/compiler/ebin/beam_except.beam
index 0de15769c4..f4fccfeef7 100644
--- a/bootstrap/lib/compiler/ebin/beam_except.beam
+++ b/bootstrap/lib/compiler/ebin/beam_except.beam
Binary files differ
diff --git a/bootstrap/lib/compiler/ebin/beam_flatten.beam b/bootstrap/lib/compiler/ebin/beam_flatten.beam
index de1a719a4e..c1d5604349 100644
--- a/bootstrap/lib/compiler/ebin/beam_flatten.beam
+++ b/bootstrap/lib/compiler/ebin/beam_flatten.beam
Binary files differ
diff --git a/bootstrap/lib/compiler/ebin/beam_jump.beam b/bootstrap/lib/compiler/ebin/beam_jump.beam
index 2b839bc997..3eef625909 100644
--- a/bootstrap/lib/compiler/ebin/beam_jump.beam
+++ b/bootstrap/lib/compiler/ebin/beam_jump.beam
Binary files differ
diff --git a/bootstrap/lib/compiler/ebin/beam_receive.beam b/bootstrap/lib/compiler/ebin/beam_receive.beam
index 5da32b5db6..aec35c43eb 100644
--- a/bootstrap/lib/compiler/ebin/beam_receive.beam
+++ b/bootstrap/lib/compiler/ebin/beam_receive.beam
Binary files differ
diff --git a/bootstrap/lib/compiler/ebin/beam_split.beam b/bootstrap/lib/compiler/ebin/beam_split.beam
index e99c0a69b9..4821d92c96 100644
--- a/bootstrap/lib/compiler/ebin/beam_split.beam
+++ b/bootstrap/lib/compiler/ebin/beam_split.beam
Binary files differ
diff --git a/bootstrap/lib/compiler/ebin/beam_type.beam b/bootstrap/lib/compiler/ebin/beam_type.beam
index 7f80c1a691..53cb772708 100644
--- a/bootstrap/lib/compiler/ebin/beam_type.beam
+++ b/bootstrap/lib/compiler/ebin/beam_type.beam
Binary files differ
diff --git a/bootstrap/lib/compiler/ebin/beam_utils.beam b/bootstrap/lib/compiler/ebin/beam_utils.beam
index 659bb08d48..1490435875 100644
--- a/bootstrap/lib/compiler/ebin/beam_utils.beam
+++ b/bootstrap/lib/compiler/ebin/beam_utils.beam
Binary files differ
diff --git a/bootstrap/lib/compiler/ebin/beam_validator.beam b/bootstrap/lib/compiler/ebin/beam_validator.beam
index ba1ea73dc4..d5dcd7bba0 100644
--- a/bootstrap/lib/compiler/ebin/beam_validator.beam
+++ b/bootstrap/lib/compiler/ebin/beam_validator.beam
Binary files differ
diff --git a/bootstrap/lib/compiler/ebin/beam_z.beam b/bootstrap/lib/compiler/ebin/beam_z.beam
index ad551bcc8c..4738099dc3 100644
--- a/bootstrap/lib/compiler/ebin/beam_z.beam
+++ b/bootstrap/lib/compiler/ebin/beam_z.beam
Binary files differ
diff --git a/bootstrap/lib/compiler/ebin/cerl.beam b/bootstrap/lib/compiler/ebin/cerl.beam
index 5e1c698ae8..4ae917789b 100644
--- a/bootstrap/lib/compiler/ebin/cerl.beam
+++ b/bootstrap/lib/compiler/ebin/cerl.beam
Binary files differ
diff --git a/bootstrap/lib/compiler/ebin/cerl_inline.beam b/bootstrap/lib/compiler/ebin/cerl_inline.beam
index 7328f75f34..24a36e139d 100644
--- a/bootstrap/lib/compiler/ebin/cerl_inline.beam
+++ b/bootstrap/lib/compiler/ebin/cerl_inline.beam
Binary files differ
diff --git a/bootstrap/lib/compiler/ebin/compile.beam b/bootstrap/lib/compiler/ebin/compile.beam
index eb682b953c..5df7d4aa86 100644
--- a/bootstrap/lib/compiler/ebin/compile.beam
+++ b/bootstrap/lib/compiler/ebin/compile.beam
Binary files differ
diff --git a/bootstrap/lib/compiler/ebin/compiler.app b/bootstrap/lib/compiler/ebin/compiler.app
index 5f15db4437..d1caa3a348 100644
--- a/bootstrap/lib/compiler/ebin/compiler.app
+++ b/bootstrap/lib/compiler/ebin/compiler.app
@@ -18,7 +18,7 @@
{application, compiler,
[{description, "ERTS CXC 138 10"},
- {vsn, "5.0.2"},
+ {vsn, "5.0.3"},
{modules, [
beam_a,
beam_asm,
@@ -56,6 +56,7 @@
rec_env,
sys_core_dsetel,
sys_core_fold,
+ sys_core_fold_lists,
sys_core_inline,
sys_pre_attributes,
sys_pre_expand,
diff --git a/bootstrap/lib/compiler/ebin/compiler.appup b/bootstrap/lib/compiler/ebin/compiler.appup
index bde95b1878..3c80da02b5 100644
--- a/bootstrap/lib/compiler/ebin/compiler.appup
+++ b/bootstrap/lib/compiler/ebin/compiler.appup
@@ -15,7 +15,7 @@
%% under the License.
%%
%% %CopyrightEnd%
-{"5.0.2",
+{"5.0.3",
[{<<".*">>,[{restart_application, compiler}]}],
[{<<".*">>,[{restart_application, compiler}]}]
}.
diff --git a/bootstrap/lib/compiler/ebin/core_lib.beam b/bootstrap/lib/compiler/ebin/core_lib.beam
index bb38837049..ec87099635 100644
--- a/bootstrap/lib/compiler/ebin/core_lib.beam
+++ b/bootstrap/lib/compiler/ebin/core_lib.beam
Binary files differ
diff --git a/bootstrap/lib/compiler/ebin/core_lint.beam b/bootstrap/lib/compiler/ebin/core_lint.beam
index 9f83408bae..b1cbbf030d 100644
--- a/bootstrap/lib/compiler/ebin/core_lint.beam
+++ b/bootstrap/lib/compiler/ebin/core_lint.beam
Binary files differ
diff --git a/bootstrap/lib/compiler/ebin/core_parse.beam b/bootstrap/lib/compiler/ebin/core_parse.beam
index 121ef65275..6ea05bf28f 100644
--- a/bootstrap/lib/compiler/ebin/core_parse.beam
+++ b/bootstrap/lib/compiler/ebin/core_parse.beam
Binary files differ
diff --git a/bootstrap/lib/compiler/ebin/core_pp.beam b/bootstrap/lib/compiler/ebin/core_pp.beam
index cb02293c1d..7ba89c698e 100644
--- a/bootstrap/lib/compiler/ebin/core_pp.beam
+++ b/bootstrap/lib/compiler/ebin/core_pp.beam
Binary files differ
diff --git a/bootstrap/lib/compiler/ebin/core_scan.beam b/bootstrap/lib/compiler/ebin/core_scan.beam
index 73f588232a..c54edfc0e7 100644
--- a/bootstrap/lib/compiler/ebin/core_scan.beam
+++ b/bootstrap/lib/compiler/ebin/core_scan.beam
Binary files differ
diff --git a/bootstrap/lib/compiler/ebin/erl_bifs.beam b/bootstrap/lib/compiler/ebin/erl_bifs.beam
index 27136581c1..d6c4dec85e 100644
--- a/bootstrap/lib/compiler/ebin/erl_bifs.beam
+++ b/bootstrap/lib/compiler/ebin/erl_bifs.beam
Binary files differ
diff --git a/bootstrap/lib/compiler/ebin/rec_env.beam b/bootstrap/lib/compiler/ebin/rec_env.beam
index 4bd37b16c0..c49a2e23e9 100644
--- a/bootstrap/lib/compiler/ebin/rec_env.beam
+++ b/bootstrap/lib/compiler/ebin/rec_env.beam
Binary files differ
diff --git a/bootstrap/lib/compiler/ebin/sys_core_dsetel.beam b/bootstrap/lib/compiler/ebin/sys_core_dsetel.beam
index 29dfdfb755..14c69eec6c 100644
--- a/bootstrap/lib/compiler/ebin/sys_core_dsetel.beam
+++ b/bootstrap/lib/compiler/ebin/sys_core_dsetel.beam
Binary files differ
diff --git a/bootstrap/lib/compiler/ebin/sys_core_fold.beam b/bootstrap/lib/compiler/ebin/sys_core_fold.beam
index cce0ac7832..bc15a9d558 100644
--- a/bootstrap/lib/compiler/ebin/sys_core_fold.beam
+++ b/bootstrap/lib/compiler/ebin/sys_core_fold.beam
Binary files differ
diff --git a/bootstrap/lib/compiler/ebin/sys_core_fold_lists.beam b/bootstrap/lib/compiler/ebin/sys_core_fold_lists.beam
new file mode 100644
index 0000000000..58ae7c0393
--- /dev/null
+++ b/bootstrap/lib/compiler/ebin/sys_core_fold_lists.beam
Binary files differ
diff --git a/bootstrap/lib/compiler/ebin/sys_core_inline.beam b/bootstrap/lib/compiler/ebin/sys_core_inline.beam
index 1e358ee0ea..4f44297bee 100644
--- a/bootstrap/lib/compiler/ebin/sys_core_inline.beam
+++ b/bootstrap/lib/compiler/ebin/sys_core_inline.beam
Binary files differ
diff --git a/bootstrap/lib/compiler/ebin/sys_pre_expand.beam b/bootstrap/lib/compiler/ebin/sys_pre_expand.beam
index 96f22d0970..0ad8bc7ae4 100644
--- a/bootstrap/lib/compiler/ebin/sys_pre_expand.beam
+++ b/bootstrap/lib/compiler/ebin/sys_pre_expand.beam
Binary files differ
diff --git a/bootstrap/lib/compiler/ebin/v3_core.beam b/bootstrap/lib/compiler/ebin/v3_core.beam
index f492d3428c..72caca5336 100644
--- a/bootstrap/lib/compiler/ebin/v3_core.beam
+++ b/bootstrap/lib/compiler/ebin/v3_core.beam
Binary files differ
diff --git a/bootstrap/lib/compiler/ebin/v3_kernel.beam b/bootstrap/lib/compiler/ebin/v3_kernel.beam
index b56f507f47..b12376611f 100644
--- a/bootstrap/lib/compiler/ebin/v3_kernel.beam
+++ b/bootstrap/lib/compiler/ebin/v3_kernel.beam
Binary files differ
diff --git a/bootstrap/lib/compiler/ebin/v3_kernel_pp.beam b/bootstrap/lib/compiler/ebin/v3_kernel_pp.beam
index 1798a279b0..d54716bbee 100644
--- a/bootstrap/lib/compiler/ebin/v3_kernel_pp.beam
+++ b/bootstrap/lib/compiler/ebin/v3_kernel_pp.beam
Binary files differ
diff --git a/bootstrap/lib/kernel/ebin/application.beam b/bootstrap/lib/kernel/ebin/application.beam
index ba3a5ef81d..b8f7f156fe 100644
--- a/bootstrap/lib/kernel/ebin/application.beam
+++ b/bootstrap/lib/kernel/ebin/application.beam
Binary files differ
diff --git a/bootstrap/lib/kernel/ebin/application_controller.beam b/bootstrap/lib/kernel/ebin/application_controller.beam
index 2a4525c27f..a86d7aaf98 100644
--- a/bootstrap/lib/kernel/ebin/application_controller.beam
+++ b/bootstrap/lib/kernel/ebin/application_controller.beam
Binary files differ
diff --git a/bootstrap/lib/kernel/ebin/application_master.beam b/bootstrap/lib/kernel/ebin/application_master.beam
index 365bc7b53d..4ff90f7eff 100644
--- a/bootstrap/lib/kernel/ebin/application_master.beam
+++ b/bootstrap/lib/kernel/ebin/application_master.beam
Binary files differ
diff --git a/bootstrap/lib/kernel/ebin/code_server.beam b/bootstrap/lib/kernel/ebin/code_server.beam
index 340c5feed9..aa75ae9bd1 100644
--- a/bootstrap/lib/kernel/ebin/code_server.beam
+++ b/bootstrap/lib/kernel/ebin/code_server.beam
Binary files differ
diff --git a/bootstrap/lib/kernel/ebin/disk_log.beam b/bootstrap/lib/kernel/ebin/disk_log.beam
index 8a7d6a8388..6ed04672c2 100644
--- a/bootstrap/lib/kernel/ebin/disk_log.beam
+++ b/bootstrap/lib/kernel/ebin/disk_log.beam
Binary files differ
diff --git a/bootstrap/lib/kernel/ebin/disk_log_1.beam b/bootstrap/lib/kernel/ebin/disk_log_1.beam
index 5193dfa53d..0b771924ac 100644
--- a/bootstrap/lib/kernel/ebin/disk_log_1.beam
+++ b/bootstrap/lib/kernel/ebin/disk_log_1.beam
Binary files differ
diff --git a/bootstrap/lib/kernel/ebin/dist_ac.beam b/bootstrap/lib/kernel/ebin/dist_ac.beam
index 9d3f7548b6..25b155418c 100644
--- a/bootstrap/lib/kernel/ebin/dist_ac.beam
+++ b/bootstrap/lib/kernel/ebin/dist_ac.beam
Binary files differ
diff --git a/bootstrap/lib/kernel/ebin/erl_epmd.beam b/bootstrap/lib/kernel/ebin/erl_epmd.beam
index bf171450e9..92d6387af3 100644
--- a/bootstrap/lib/kernel/ebin/erl_epmd.beam
+++ b/bootstrap/lib/kernel/ebin/erl_epmd.beam
Binary files differ
diff --git a/bootstrap/lib/kernel/ebin/file.beam b/bootstrap/lib/kernel/ebin/file.beam
index 25f2506078..18ba4449b8 100644
--- a/bootstrap/lib/kernel/ebin/file.beam
+++ b/bootstrap/lib/kernel/ebin/file.beam
Binary files differ
diff --git a/bootstrap/lib/kernel/ebin/file_io_server.beam b/bootstrap/lib/kernel/ebin/file_io_server.beam
index dfaa1b07f0..5e084a38f1 100644
--- a/bootstrap/lib/kernel/ebin/file_io_server.beam
+++ b/bootstrap/lib/kernel/ebin/file_io_server.beam
Binary files differ
diff --git a/bootstrap/lib/kernel/ebin/global.beam b/bootstrap/lib/kernel/ebin/global.beam
index d25be3994c..351dc3db07 100644
--- a/bootstrap/lib/kernel/ebin/global.beam
+++ b/bootstrap/lib/kernel/ebin/global.beam
Binary files differ
diff --git a/bootstrap/lib/kernel/ebin/group.beam b/bootstrap/lib/kernel/ebin/group.beam
index 412b8e0007..5ec6322df2 100644
--- a/bootstrap/lib/kernel/ebin/group.beam
+++ b/bootstrap/lib/kernel/ebin/group.beam
Binary files differ
diff --git a/bootstrap/lib/kernel/ebin/hipe_unified_loader.beam b/bootstrap/lib/kernel/ebin/hipe_unified_loader.beam
index e6141fd7f1..e6d96c6ecb 100644
--- a/bootstrap/lib/kernel/ebin/hipe_unified_loader.beam
+++ b/bootstrap/lib/kernel/ebin/hipe_unified_loader.beam
Binary files differ
diff --git a/bootstrap/lib/kernel/ebin/inet.beam b/bootstrap/lib/kernel/ebin/inet.beam
index 4d156b726c..50783dfdee 100644
--- a/bootstrap/lib/kernel/ebin/inet.beam
+++ b/bootstrap/lib/kernel/ebin/inet.beam
Binary files differ
diff --git a/bootstrap/lib/kernel/ebin/inet6_tcp.beam b/bootstrap/lib/kernel/ebin/inet6_tcp.beam
index 036b0495c8..d080a1200b 100644
--- a/bootstrap/lib/kernel/ebin/inet6_tcp.beam
+++ b/bootstrap/lib/kernel/ebin/inet6_tcp.beam
Binary files differ
diff --git a/bootstrap/lib/kernel/ebin/inet6_tcp_dist.beam b/bootstrap/lib/kernel/ebin/inet6_tcp_dist.beam
index 83305c1826..3f98206013 100644
--- a/bootstrap/lib/kernel/ebin/inet6_tcp_dist.beam
+++ b/bootstrap/lib/kernel/ebin/inet6_tcp_dist.beam
Binary files differ
diff --git a/bootstrap/lib/kernel/ebin/inet_config.beam b/bootstrap/lib/kernel/ebin/inet_config.beam
index e858ac20f1..f31fb16742 100644
--- a/bootstrap/lib/kernel/ebin/inet_config.beam
+++ b/bootstrap/lib/kernel/ebin/inet_config.beam
Binary files differ
diff --git a/bootstrap/lib/kernel/ebin/inet_db.beam b/bootstrap/lib/kernel/ebin/inet_db.beam
index 45e7d9e0d0..4b3ea51bb5 100644
--- a/bootstrap/lib/kernel/ebin/inet_db.beam
+++ b/bootstrap/lib/kernel/ebin/inet_db.beam
Binary files differ
diff --git a/bootstrap/lib/kernel/ebin/inet_dns.beam b/bootstrap/lib/kernel/ebin/inet_dns.beam
index 41486bed9a..9fd86024e0 100644
--- a/bootstrap/lib/kernel/ebin/inet_dns.beam
+++ b/bootstrap/lib/kernel/ebin/inet_dns.beam
Binary files differ
diff --git a/bootstrap/lib/kernel/ebin/inet_parse.beam b/bootstrap/lib/kernel/ebin/inet_parse.beam
index 90ccd8b440..56e77df9e1 100644
--- a/bootstrap/lib/kernel/ebin/inet_parse.beam
+++ b/bootstrap/lib/kernel/ebin/inet_parse.beam
Binary files differ
diff --git a/bootstrap/lib/kernel/ebin/inet_res.beam b/bootstrap/lib/kernel/ebin/inet_res.beam
index 2636552e92..dbdc9134da 100644
--- a/bootstrap/lib/kernel/ebin/inet_res.beam
+++ b/bootstrap/lib/kernel/ebin/inet_res.beam
Binary files differ
diff --git a/bootstrap/lib/kernel/ebin/inet_tcp.beam b/bootstrap/lib/kernel/ebin/inet_tcp.beam
index d854abd830..60e0d9f569 100644
--- a/bootstrap/lib/kernel/ebin/inet_tcp.beam
+++ b/bootstrap/lib/kernel/ebin/inet_tcp.beam
Binary files differ
diff --git a/bootstrap/lib/kernel/ebin/inet_tcp_dist.beam b/bootstrap/lib/kernel/ebin/inet_tcp_dist.beam
index 2e54f1e1f4..5492ab0f8b 100644
--- a/bootstrap/lib/kernel/ebin/inet_tcp_dist.beam
+++ b/bootstrap/lib/kernel/ebin/inet_tcp_dist.beam
Binary files differ
diff --git a/bootstrap/lib/kernel/ebin/kernel.app b/bootstrap/lib/kernel/ebin/kernel.app
index 1b39f7df07..3388f28e91 100644
--- a/bootstrap/lib/kernel/ebin/kernel.app
+++ b/bootstrap/lib/kernel/ebin/kernel.app
@@ -21,7 +21,7 @@
{application, kernel,
[
{description, "ERTS CXC 138 10"},
- {vsn, "3.0.3"},
+ {vsn, "3.1"},
{modules, [application,
application_controller,
application_master,
diff --git a/bootstrap/lib/kernel/ebin/kernel.appup b/bootstrap/lib/kernel/ebin/kernel.appup
index 02cf129b42..3ff5aa73f0 100644
--- a/bootstrap/lib/kernel/ebin/kernel.appup
+++ b/bootstrap/lib/kernel/ebin/kernel.appup
@@ -15,7 +15,7 @@
%% under the License.
%%
%% %CopyrightEnd%
-{"3.0.3",
+{"3.1",
%% Up from - max one major revision back
[{<<"3\\.0(\\.[0-9]+)*">>,[restart_new_emulator]}], % OTP-17
%% Down to - max one major revision back
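
Aside: the "Up from" instruction above keys on the pattern 3\.0(\.[0-9]+)* (the binary <<"3\\.0(\\.[0-9]+)*">> with Erlang string escaping removed), so upgrading to kernel 3.1 from any 3.0 patch release triggers restart_new_emulator. A standalone illustration of which version strings that pattern covers; POSIX ERE is used here purely for illustration, and the full-string anchoring is an assumption of this sketch, not something this diff spells out:

    #include <regex.h>
    #include <stdio.h>

    int main(void)
    {
        /* Same pattern as in kernel.appup, anchored to the whole version string. */
        const char *pat = "^3\\.0(\\.[0-9]+)*$";
        const char *vsns[] = { "3.0", "3.0.1", "3.0.3", "3.1", "2.16.4" };
        regex_t re;

        if (regcomp(&re, pat, REG_EXTENDED | REG_NOSUB) != 0)
            return 1;
        for (unsigned i = 0; i < sizeof vsns / sizeof vsns[0]; i++)
            printf("%-7s -> %s\n", vsns[i],
                   regexec(&re, vsns[i], 0, NULL, 0) == 0
                       ? "restart_new_emulator"   /* 3.0, 3.0.1, 3.0.3 match   */
                       : "no match");             /* 3.1 and 2.16.4 do not     */
        regfree(&re);
        return 0;
    }

The stdlib.appup hunk later in this diff follows the same shape for the 2.3 version bump.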
diff --git a/bootstrap/lib/kernel/ebin/kernel.beam b/bootstrap/lib/kernel/ebin/kernel.beam
index c938b218e1..e38c4baa31 100644
--- a/bootstrap/lib/kernel/ebin/kernel.beam
+++ b/bootstrap/lib/kernel/ebin/kernel.beam
Binary files differ
diff --git a/bootstrap/lib/kernel/ebin/kernel_config.beam b/bootstrap/lib/kernel/ebin/kernel_config.beam
index c35da2c73d..5a4649a054 100644
--- a/bootstrap/lib/kernel/ebin/kernel_config.beam
+++ b/bootstrap/lib/kernel/ebin/kernel_config.beam
Binary files differ
diff --git a/bootstrap/lib/kernel/ebin/net_kernel.beam b/bootstrap/lib/kernel/ebin/net_kernel.beam
index 33193008c7..757f5f5dd0 100644
--- a/bootstrap/lib/kernel/ebin/net_kernel.beam
+++ b/bootstrap/lib/kernel/ebin/net_kernel.beam
Binary files differ
diff --git a/bootstrap/lib/kernel/ebin/os.beam b/bootstrap/lib/kernel/ebin/os.beam
index b1b92ca36b..d16dbbac73 100644
--- a/bootstrap/lib/kernel/ebin/os.beam
+++ b/bootstrap/lib/kernel/ebin/os.beam
Binary files differ
diff --git a/bootstrap/lib/kernel/ebin/pg2.beam b/bootstrap/lib/kernel/ebin/pg2.beam
index c26b5a4ffc..db89630d70 100644
--- a/bootstrap/lib/kernel/ebin/pg2.beam
+++ b/bootstrap/lib/kernel/ebin/pg2.beam
Binary files differ
diff --git a/bootstrap/lib/kernel/ebin/ram_file.beam b/bootstrap/lib/kernel/ebin/ram_file.beam
index 4d6df15730..a8b23f5490 100644
--- a/bootstrap/lib/kernel/ebin/ram_file.beam
+++ b/bootstrap/lib/kernel/ebin/ram_file.beam
Binary files differ
diff --git a/bootstrap/lib/kernel/ebin/standard_error.beam b/bootstrap/lib/kernel/ebin/standard_error.beam
index 21b5722bb2..be4f9292e7 100644
--- a/bootstrap/lib/kernel/ebin/standard_error.beam
+++ b/bootstrap/lib/kernel/ebin/standard_error.beam
Binary files differ
diff --git a/bootstrap/lib/kernel/ebin/user.beam b/bootstrap/lib/kernel/ebin/user.beam
index 0e06c78fce..d69339866a 100644
--- a/bootstrap/lib/kernel/ebin/user.beam
+++ b/bootstrap/lib/kernel/ebin/user.beam
Binary files differ
diff --git a/bootstrap/lib/kernel/ebin/user_drv.beam b/bootstrap/lib/kernel/ebin/user_drv.beam
index 4a2cd01b0d..c21b663e20 100644
--- a/bootstrap/lib/kernel/ebin/user_drv.beam
+++ b/bootstrap/lib/kernel/ebin/user_drv.beam
Binary files differ
diff --git a/bootstrap/lib/kernel/ebin/wrap_log_reader.beam b/bootstrap/lib/kernel/ebin/wrap_log_reader.beam
index f35187c269..9269e7847e 100644
--- a/bootstrap/lib/kernel/ebin/wrap_log_reader.beam
+++ b/bootstrap/lib/kernel/ebin/wrap_log_reader.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/beam_lib.beam b/bootstrap/lib/stdlib/ebin/beam_lib.beam
index b22233171b..b0f2c784da 100644
--- a/bootstrap/lib/stdlib/ebin/beam_lib.beam
+++ b/bootstrap/lib/stdlib/ebin/beam_lib.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/binary.beam b/bootstrap/lib/stdlib/ebin/binary.beam
index 14d6ba7976..666544c492 100644
--- a/bootstrap/lib/stdlib/ebin/binary.beam
+++ b/bootstrap/lib/stdlib/ebin/binary.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/c.beam b/bootstrap/lib/stdlib/ebin/c.beam
index b184c21d94..3fed93c41a 100644
--- a/bootstrap/lib/stdlib/ebin/c.beam
+++ b/bootstrap/lib/stdlib/ebin/c.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/calendar.beam b/bootstrap/lib/stdlib/ebin/calendar.beam
index 19e0443dd2..f40de9bb82 100644
--- a/bootstrap/lib/stdlib/ebin/calendar.beam
+++ b/bootstrap/lib/stdlib/ebin/calendar.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/dets.beam b/bootstrap/lib/stdlib/ebin/dets.beam
index 6f430b8c6c..e045268ef6 100644
--- a/bootstrap/lib/stdlib/ebin/dets.beam
+++ b/bootstrap/lib/stdlib/ebin/dets.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/dets_server.beam b/bootstrap/lib/stdlib/ebin/dets_server.beam
index 6bbefbbf1e..46c6769dfe 100644
--- a/bootstrap/lib/stdlib/ebin/dets_server.beam
+++ b/bootstrap/lib/stdlib/ebin/dets_server.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/dets_utils.beam b/bootstrap/lib/stdlib/ebin/dets_utils.beam
index cb4da358d6..88ddbbe73b 100644
--- a/bootstrap/lib/stdlib/ebin/dets_utils.beam
+++ b/bootstrap/lib/stdlib/ebin/dets_utils.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/dets_v8.beam b/bootstrap/lib/stdlib/ebin/dets_v8.beam
index eac0a308b7..6b8ec782bb 100644
--- a/bootstrap/lib/stdlib/ebin/dets_v8.beam
+++ b/bootstrap/lib/stdlib/ebin/dets_v8.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/dets_v9.beam b/bootstrap/lib/stdlib/ebin/dets_v9.beam
index c85adabcae..61ad68555d 100644
--- a/bootstrap/lib/stdlib/ebin/dets_v9.beam
+++ b/bootstrap/lib/stdlib/ebin/dets_v9.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/dict.beam b/bootstrap/lib/stdlib/ebin/dict.beam
index e39c79bd5a..6ba808d6af 100644
--- a/bootstrap/lib/stdlib/ebin/dict.beam
+++ b/bootstrap/lib/stdlib/ebin/dict.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/digraph_utils.beam b/bootstrap/lib/stdlib/ebin/digraph_utils.beam
index 64783f4e1d..fdc6741439 100644
--- a/bootstrap/lib/stdlib/ebin/digraph_utils.beam
+++ b/bootstrap/lib/stdlib/ebin/digraph_utils.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/edlin.beam b/bootstrap/lib/stdlib/ebin/edlin.beam
index 12aa74c4ac..6611ca02f0 100644
--- a/bootstrap/lib/stdlib/ebin/edlin.beam
+++ b/bootstrap/lib/stdlib/ebin/edlin.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/edlin_expand.beam b/bootstrap/lib/stdlib/ebin/edlin_expand.beam
index e182f67bd8..3d9846bdcf 100644
--- a/bootstrap/lib/stdlib/ebin/edlin_expand.beam
+++ b/bootstrap/lib/stdlib/ebin/edlin_expand.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/epp.beam b/bootstrap/lib/stdlib/ebin/epp.beam
index 83469557cd..96422d9ecc 100644
--- a/bootstrap/lib/stdlib/ebin/epp.beam
+++ b/bootstrap/lib/stdlib/ebin/epp.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/erl_expand_records.beam b/bootstrap/lib/stdlib/ebin/erl_expand_records.beam
index ff84000ac0..d4f56f298c 100644
--- a/bootstrap/lib/stdlib/ebin/erl_expand_records.beam
+++ b/bootstrap/lib/stdlib/ebin/erl_expand_records.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/erl_lint.beam b/bootstrap/lib/stdlib/ebin/erl_lint.beam
index 9c6c4be617..018ed7ec06 100644
--- a/bootstrap/lib/stdlib/ebin/erl_lint.beam
+++ b/bootstrap/lib/stdlib/ebin/erl_lint.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/erl_parse.beam b/bootstrap/lib/stdlib/ebin/erl_parse.beam
index ddef03db31..373eaef459 100644
--- a/bootstrap/lib/stdlib/ebin/erl_parse.beam
+++ b/bootstrap/lib/stdlib/ebin/erl_parse.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/erl_pp.beam b/bootstrap/lib/stdlib/ebin/erl_pp.beam
index cb6bd2056c..12755c710e 100644
--- a/bootstrap/lib/stdlib/ebin/erl_pp.beam
+++ b/bootstrap/lib/stdlib/ebin/erl_pp.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/erl_scan.beam b/bootstrap/lib/stdlib/ebin/erl_scan.beam
index b2b97bda8a..9fbcb7b0e2 100644
--- a/bootstrap/lib/stdlib/ebin/erl_scan.beam
+++ b/bootstrap/lib/stdlib/ebin/erl_scan.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/erl_tar.beam b/bootstrap/lib/stdlib/ebin/erl_tar.beam
index 76f5f95270..193ab96212 100644
--- a/bootstrap/lib/stdlib/ebin/erl_tar.beam
+++ b/bootstrap/lib/stdlib/ebin/erl_tar.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/error_logger_file_h.beam b/bootstrap/lib/stdlib/ebin/error_logger_file_h.beam
index 673ee6e8b2..5c1bc6045f 100644
--- a/bootstrap/lib/stdlib/ebin/error_logger_file_h.beam
+++ b/bootstrap/lib/stdlib/ebin/error_logger_file_h.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/error_logger_tty_h.beam b/bootstrap/lib/stdlib/ebin/error_logger_tty_h.beam
index 87d58cf378..9711f57e43 100644
--- a/bootstrap/lib/stdlib/ebin/error_logger_tty_h.beam
+++ b/bootstrap/lib/stdlib/ebin/error_logger_tty_h.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/escript.beam b/bootstrap/lib/stdlib/ebin/escript.beam
index 401b41a2f5..53f963ce79 100644
--- a/bootstrap/lib/stdlib/ebin/escript.beam
+++ b/bootstrap/lib/stdlib/ebin/escript.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/ets.beam b/bootstrap/lib/stdlib/ebin/ets.beam
index 8d024aaa99..6325142290 100644
--- a/bootstrap/lib/stdlib/ebin/ets.beam
+++ b/bootstrap/lib/stdlib/ebin/ets.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/file_sorter.beam b/bootstrap/lib/stdlib/ebin/file_sorter.beam
index b5844d5ddb..fbca44c892 100644
--- a/bootstrap/lib/stdlib/ebin/file_sorter.beam
+++ b/bootstrap/lib/stdlib/ebin/file_sorter.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/filelib.beam b/bootstrap/lib/stdlib/ebin/filelib.beam
index ab3600c2f2..5941f9189f 100644
--- a/bootstrap/lib/stdlib/ebin/filelib.beam
+++ b/bootstrap/lib/stdlib/ebin/filelib.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/filename.beam b/bootstrap/lib/stdlib/ebin/filename.beam
index 551e2d3f01..cf6be54668 100644
--- a/bootstrap/lib/stdlib/ebin/filename.beam
+++ b/bootstrap/lib/stdlib/ebin/filename.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/gb_sets.beam b/bootstrap/lib/stdlib/ebin/gb_sets.beam
index feadc1d1ed..cf763f51a0 100644
--- a/bootstrap/lib/stdlib/ebin/gb_sets.beam
+++ b/bootstrap/lib/stdlib/ebin/gb_sets.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/gb_trees.beam b/bootstrap/lib/stdlib/ebin/gb_trees.beam
index 879b8cdf76..6660dcb787 100644
--- a/bootstrap/lib/stdlib/ebin/gb_trees.beam
+++ b/bootstrap/lib/stdlib/ebin/gb_trees.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/gen.beam b/bootstrap/lib/stdlib/ebin/gen.beam
index b9f4b6a6d7..61259fd267 100644
--- a/bootstrap/lib/stdlib/ebin/gen.beam
+++ b/bootstrap/lib/stdlib/ebin/gen.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/gen_event.beam b/bootstrap/lib/stdlib/ebin/gen_event.beam
index 4359a69f03..d46e225182 100644
--- a/bootstrap/lib/stdlib/ebin/gen_event.beam
+++ b/bootstrap/lib/stdlib/ebin/gen_event.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/gen_server.beam b/bootstrap/lib/stdlib/ebin/gen_server.beam
index 7a7b4ad0f0..11487e747c 100644
--- a/bootstrap/lib/stdlib/ebin/gen_server.beam
+++ b/bootstrap/lib/stdlib/ebin/gen_server.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/io.beam b/bootstrap/lib/stdlib/ebin/io.beam
index 6c80bfe78b..fd64aedde1 100644
--- a/bootstrap/lib/stdlib/ebin/io.beam
+++ b/bootstrap/lib/stdlib/ebin/io.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/io_lib.beam b/bootstrap/lib/stdlib/ebin/io_lib.beam
index ad77021190..3e22b9671b 100644
--- a/bootstrap/lib/stdlib/ebin/io_lib.beam
+++ b/bootstrap/lib/stdlib/ebin/io_lib.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/io_lib_format.beam b/bootstrap/lib/stdlib/ebin/io_lib_format.beam
index fcfce54d0e..b7eb4ee64b 100644
--- a/bootstrap/lib/stdlib/ebin/io_lib_format.beam
+++ b/bootstrap/lib/stdlib/ebin/io_lib_format.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/io_lib_fread.beam b/bootstrap/lib/stdlib/ebin/io_lib_fread.beam
index 8c45cf484c..b4af88d5b5 100644
--- a/bootstrap/lib/stdlib/ebin/io_lib_fread.beam
+++ b/bootstrap/lib/stdlib/ebin/io_lib_fread.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/io_lib_pretty.beam b/bootstrap/lib/stdlib/ebin/io_lib_pretty.beam
index fa7e6fcbc3..6a12b5a16b 100644
--- a/bootstrap/lib/stdlib/ebin/io_lib_pretty.beam
+++ b/bootstrap/lib/stdlib/ebin/io_lib_pretty.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/lib.beam b/bootstrap/lib/stdlib/ebin/lib.beam
index c9086e1796..1cc92d1b8d 100644
--- a/bootstrap/lib/stdlib/ebin/lib.beam
+++ b/bootstrap/lib/stdlib/ebin/lib.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/lists.beam b/bootstrap/lib/stdlib/ebin/lists.beam
index 96eb8f3a86..9db37e21d4 100644
--- a/bootstrap/lib/stdlib/ebin/lists.beam
+++ b/bootstrap/lib/stdlib/ebin/lists.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/math.beam b/bootstrap/lib/stdlib/ebin/math.beam
index c2e8dfb874..b4596ee515 100644
--- a/bootstrap/lib/stdlib/ebin/math.beam
+++ b/bootstrap/lib/stdlib/ebin/math.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/ms_transform.beam b/bootstrap/lib/stdlib/ebin/ms_transform.beam
index 9a55c521f8..fbc7b4c86d 100644
--- a/bootstrap/lib/stdlib/ebin/ms_transform.beam
+++ b/bootstrap/lib/stdlib/ebin/ms_transform.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/otp_internal.beam b/bootstrap/lib/stdlib/ebin/otp_internal.beam
index 670943aad3..9812be8bd9 100644
--- a/bootstrap/lib/stdlib/ebin/otp_internal.beam
+++ b/bootstrap/lib/stdlib/ebin/otp_internal.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/proc_lib.beam b/bootstrap/lib/stdlib/ebin/proc_lib.beam
index 55cfff4680..cb6a8d6049 100644
--- a/bootstrap/lib/stdlib/ebin/proc_lib.beam
+++ b/bootstrap/lib/stdlib/ebin/proc_lib.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/qlc.beam b/bootstrap/lib/stdlib/ebin/qlc.beam
index ec3f0d174e..f7852f3287 100644
--- a/bootstrap/lib/stdlib/ebin/qlc.beam
+++ b/bootstrap/lib/stdlib/ebin/qlc.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/qlc_pt.beam b/bootstrap/lib/stdlib/ebin/qlc_pt.beam
index 6184f0c295..1bf716cdc8 100644
--- a/bootstrap/lib/stdlib/ebin/qlc_pt.beam
+++ b/bootstrap/lib/stdlib/ebin/qlc_pt.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/re.beam b/bootstrap/lib/stdlib/ebin/re.beam
index 74b369bb0c..9374cfd78f 100644
--- a/bootstrap/lib/stdlib/ebin/re.beam
+++ b/bootstrap/lib/stdlib/ebin/re.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/shell.beam b/bootstrap/lib/stdlib/ebin/shell.beam
index 8c4734f208..37cb1626c2 100644
--- a/bootstrap/lib/stdlib/ebin/shell.beam
+++ b/bootstrap/lib/stdlib/ebin/shell.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/sofs.beam b/bootstrap/lib/stdlib/ebin/sofs.beam
index 90d82c23c0..976483833f 100644
--- a/bootstrap/lib/stdlib/ebin/sofs.beam
+++ b/bootstrap/lib/stdlib/ebin/sofs.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/stdlib.app b/bootstrap/lib/stdlib/ebin/stdlib.app
index 968f2e4d2f..295d208e42 100644
--- a/bootstrap/lib/stdlib/ebin/stdlib.app
+++ b/bootstrap/lib/stdlib/ebin/stdlib.app
@@ -19,7 +19,7 @@
%%
{application, stdlib,
[{description, "ERTS CXC 138 10"},
- {vsn, "2.2"},
+ {vsn, "2.3"},
{modules, [array,
base64,
beam_lib,
diff --git a/bootstrap/lib/stdlib/ebin/stdlib.appup b/bootstrap/lib/stdlib/ebin/stdlib.appup
index 541b1925e5..37251e8e2d 100644
--- a/bootstrap/lib/stdlib/ebin/stdlib.appup
+++ b/bootstrap/lib/stdlib/ebin/stdlib.appup
@@ -15,7 +15,7 @@
%% under the License.
%%
%% %CopyrightEnd%
-{"2.2",
+{"2.3",
%% Up from - max one major revision back
[{<<"2\\.[1-2](\\.[0-9]+)*">>,[restart_new_emulator]}, %% 17.1-17.3
{<<"2\\.0(\\.[0-9]+)*">>,[restart_new_emulator]}], %% 17.0
diff --git a/bootstrap/lib/stdlib/ebin/string.beam b/bootstrap/lib/stdlib/ebin/string.beam
index 79e579f0aa..ff1ae1db7d 100644
--- a/bootstrap/lib/stdlib/ebin/string.beam
+++ b/bootstrap/lib/stdlib/ebin/string.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/supervisor.beam b/bootstrap/lib/stdlib/ebin/supervisor.beam
index c2f7d78ec2..34c97e7f8f 100644
--- a/bootstrap/lib/stdlib/ebin/supervisor.beam
+++ b/bootstrap/lib/stdlib/ebin/supervisor.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/unicode.beam b/bootstrap/lib/stdlib/ebin/unicode.beam
index 76342ef108..138a1ce28d 100644
--- a/bootstrap/lib/stdlib/ebin/unicode.beam
+++ b/bootstrap/lib/stdlib/ebin/unicode.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/win32reg.beam b/bootstrap/lib/stdlib/ebin/win32reg.beam
index 5d023ecb93..b37ead63ba 100644
--- a/bootstrap/lib/stdlib/ebin/win32reg.beam
+++ b/bootstrap/lib/stdlib/ebin/win32reg.beam
Binary files differ
diff --git a/bootstrap/lib/stdlib/ebin/zip.beam b/bootstrap/lib/stdlib/ebin/zip.beam
index 465a8b95a7..c3a1c67c8f 100644
--- a/bootstrap/lib/stdlib/ebin/zip.beam
+++ b/bootstrap/lib/stdlib/ebin/zip.beam
Binary files differ
diff --git a/erts/autoconf/vxworks/sed.general b/erts/autoconf/vxworks/sed.general
index efa4e99054..c82b7348cf 100644
--- a/erts/autoconf/vxworks/sed.general
+++ b/erts/autoconf/vxworks/sed.general
@@ -78,8 +78,6 @@ s|@WITH_SCTP@||
# HiPE
s|@HIPE_ENABLED@||
-s|@PERFCTR_PATH@||
-s|@USE_PERFCTR@||
# m4
s|@OPSYS@|noopsys|
diff --git a/erts/configure.in b/erts/configure.in
index 1676d3d216..f248cbe34d 100644
--- a/erts/configure.in
+++ b/erts/configure.in
@@ -264,24 +264,6 @@ AS_HELP_STRING([--enable-m32-build],
esac
],enable_m32_build=no)
-AC_SUBST(PERFCTR_PATH)
-AC_ARG_WITH(perfctr,
-AS_HELP_STRING([--with-perfctr=PATH],
- [specify location of perfctr include and lib])
-AS_HELP_STRING([--without-perfctr], [don't use perfctr (default)]))
-
-if test "x$with_perfctr" = "xno" -o "x$with_perfctr" = "x" ; then
- PERFCTR_PATH=
-else
- if test ! -f "$with_perfctr/usr.lib/libperfctr.a" ; then
- AC_MSG_ERROR(Invalid path to option --with-perfctr=PATH)
- fi
- PERFCTR_PATH="$with_perfctr"
- AC_DEFINE(USE_PERFCTR,[1],
- [Define to enable hrvtime() on Linux systems with perfctr extension])
-fi
-
-
AC_ARG_WITH(dynamic-trace,
AS_HELP_STRING([--with-dynamic-trace={dtrace|systemtap}],
[specify use of dynamic trace framework, dtrace or systemtap])
@@ -1784,7 +1766,7 @@ if test x"$ac_cv_header_netinet_sctp_h" = x"yes"; then
AC_CHECK_DECLS([SCTP_UNORDERED, SCTP_ADDR_OVER, SCTP_ABORT,
SCTP_EOF, SCTP_SENDALL, SCTP_ADDR_CONFIRMED,
SCTP_DELAYED_ACK_TIME,
- SCTP_EMPTY,
+ SCTP_EMPTY, SCTP_UNCONFIRMED,
SCTP_CLOSED, SCTPS_IDLE,
SCTP_BOUND, SCTPS_BOUND,
SCTP_LISTEN, SCTPS_LISTEN,
@@ -3503,19 +3485,6 @@ fi
AC_SUBST(NATIVE_LIBS_ENABLED)
#
-# Check if HiPE should use a standard installation of perfctr.
-#
-AC_SUBST(USE_PERFCTR)
-if test "x$HIPE_ENABLED" = "xyes" ; then
- if test "x$with_perfctr" = "x" ; then
- AC_CHECK_LIB(perfctr, vperfctr_info, [USE_PERFCTR=1
- AC_DEFINE(USE_PERFCTR,[1],[Define to enable hrvtime() on Linux systems with perfctr extension])])
- elif test "x$with_perfctr" != "xno" ; then
- USE_PERFCTR=1
- fi
-fi
-
-#
# Check for working poll().
#
AC_MSG_CHECKING([for working poll()])
@@ -4905,6 +4874,11 @@ LDFLAGS="$LDFLAGS $sanitizers"
])
dnl ----------------------------------------------------------------------
+dnl Check for log2
+dnl ----------------------------------------------------------------------
+AC_CHECK_FUNCS([log2])
+
+dnl ----------------------------------------------------------------------
dnl Output the result.
dnl ----------------------------------------------------------------------
diff --git a/erts/doc/src/notes.xml b/erts/doc/src/notes.xml
index c896ee0cae..af0d4d7377 100644
--- a/erts/doc/src/notes.xml
+++ b/erts/doc/src/notes.xml
@@ -30,6 +30,65 @@
</header>
<p>This document describes the changes made to the ERTS application.</p>
+<section><title>Erts 6.3.1</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ Fix getifaddrs realloc pointer error</p>
+ <p>
+ When a buffer was exhausted and subsequently reallocated,
+ we could get an unsafe pointer pointing to faulty memory.</p>
+ <p>
+ For this to occur, we would need a large number of
+ interfaces and a reallocation of memory to a lower
+ address.</p>
+ <p>
+ The symptom would be garbage returned from
+ erlang:port_control(Port, 25, [])
+ (prim_inet:getifaddrs(Port) resulting in a badarg) or a
+ segmentation fault.</p>
+ <p>
+ Own Id: OTP-12445</p>
+ </item>
+ <item>
+ <p>
+ Don't close all file descriptors twice in child_setup</p>
+ <p>
+ The commit c2b4eab25c907f453a394d382c04cd04e6c06b49
+ introduced an error in which child_setup erroneously
+ tried to close all file descriptors twice.</p>
+ <p>
+ Use closefrom() if available when closing all file
+ descriptors.</p>
+ <p>
+ The function closefrom() was only used in the vfork()
+ case before but is now also used in the fork() case if
+ available.</p>
+ <p>
+ Own Id: OTP-12446</p>
+ </item>
+ <item>
+ <p>
+ During a crashdump, all file descriptors are closed to
+ ensure the closing of the epmd port and to reserve a file
+ descriptor for the crashdump file.</p>
+ <p>
+ If a driver (third-party library) cannot handle closing
+ of sockets, this could result in a segmentation fault, in
+ which case a crashdump would not be produced. This is now
+ fixed by only closing inets sockets via an emergency
+ close callback to the driver, which still closes the epmd
+ socket.</p>
+ <p>
+ Own Id: OTP-12447</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
<section><title>Erts 6.3</title>
<section><title>Fixed Bugs and Malfunctions</title>
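
The OTP-12445 note above describes a classic realloc() hazard that the inet_drv.c hunks later in this diff address: the old getifaddrs code saved a raw copy of the buffer cursor (bp) across macros that may call REALLOC, so if the block moved, possibly to a lower address, restoring buf_p from bp left a dangling pointer. The fix computes the required size up front (sockaddr_bufsz_need) and only ever carries the cursor as an offset from the start of the buffer. A minimal standalone sketch of that rule, with invented names, not the driver code itself:

    #include <stdio.h>
    #include <stdlib.h>

    /* Grow *base so that at least `need` more bytes fit after `used`.
     * The cursor is carried as an offset, never as a raw pointer, so it
     * stays valid even when realloc() moves the allocation. */
    static int ensure(char **base, size_t *cap, size_t used, size_t need)
    {
        if (used + need > *cap) {
            size_t new_cap = used + need + 512;   /* grow in chunks, as the driver does */
            char *p = realloc(*base, new_cap);
            if (p == NULL)
                return -1;
            *base = p;                            /* the old *base must not be used again */
            *cap = new_cap;
        }
        return 0;
    }

    int main(void)
    {
        size_t cap = 16, used = 0;
        char *buf = malloc(cap);

        for (int i = 0; i < 100 && buf != NULL; i++) {
            if (ensure(&buf, &cap, used, 8) != 0)
                break;
            /* re-derive the write position from the offset after every ensure() */
            used += (size_t)sprintf(buf + used, "if%d;", i);
        }
        printf("wrote %zu bytes into a %zu-byte buffer\n", used, cap);
        free(buf);
        return 0;
    }

The same diff also shrinks GETIFADDRS_BUFSZ to 1 byte in DEBUG builds, presumably so the reallocation path is exercised on every interface during testing.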
diff --git a/erts/emulator/Makefile.in b/erts/emulator/Makefile.in
index e4cd566285..a632faf57d 100644
--- a/erts/emulator/Makefile.in
+++ b/erts/emulator/Makefile.in
@@ -349,16 +349,6 @@ endif
EPCRE_LIB = $(ERL_TOP)/erts/emulator/pcre/obj/$(TARGET)/$(TYPE)/$(LIB_PREFIX)epcre$(LIB_SUFFIX)
DEPLIBS += $(EPCRE_LIB)
-PERFCTR_PATH=@PERFCTR_PATH@
-USE_PERFCTR=@USE_PERFCTR@
-ifdef PERFCTR_PATH
-LIBS += $(PERFCTR_PATH)/usr.lib/libperfctr.a
-else
-ifdef USE_PERFCTR
-LIBS += -lperfctr
-endif
-endif
-
LIBSCTP = @LIBSCTP@
ORG_THR_LIBS=@EMU_THR_LIBS@
@@ -566,9 +556,6 @@ HIPE_ppc64_TAB=hipe/hipe_ppc64.tab $(HIPE_ARCH64_TAB)
HIPE_arm_TAB=hipe/hipe_arm.tab
HIPE_ARCH_TAB=$(HIPE_$(ARCH)_TAB)
BIFS += hipe/hipe_bif0.tab hipe/hipe_bif1.tab hipe/hipe_bif2.tab $(HIPE_ARCH_TAB)
-ifdef USE_PERFCTR
-BIFS += hipe/hipe_perfctr.tab
-endif
endif
$(TARGET)/erl_bif_table.c \
@@ -664,10 +651,6 @@ COMMON_INCLUDES += -I../include/internal -I../include/internal/$(TARGET)
INCLUDES = -I$(TTF_DIR) $(COMMON_INCLUDES)
-ifdef PERFCTR_PATH
-INCLUDES += -I$(PERFCTR_PATH)/usr.lib -I$(PERFCTR_PATH)/linux/include
-endif
-
ifeq ($(TARGET),win32)
$(OBJDIR)/dll_sys.o: sys/$(ERLANG_OSTYPE)/sys.c
$(V_CC) $(CFLAGS) -DERL_RUN_SHARED_LIB=1 $(INCLUDES) -c $< -o $@
@@ -927,9 +910,6 @@ HIPE_OBJS= \
$(OBJDIR)/hipe_mode_switch.o \
$(OBJDIR)/hipe_native_bif.o \
$(OBJDIR)/hipe_stack.o $(HIPE_ARCH_OBJS)
-ifdef USE_PERFCTR
-HIPE_OBJS += $(OBJDIR)/hipe_perfctr.o
-endif
ifdef HIPE_ENABLED
EXTRA_BASE_OBJS += $(HIPE_OBJS)
endif
diff --git a/erts/emulator/beam/benchmark.c b/erts/emulator/beam/benchmark.c
index 8613131176..b16fe6b271 100644
--- a/erts/emulator/beam/benchmark.c
+++ b/erts/emulator/beam/benchmark.c
@@ -37,37 +37,9 @@ unsigned long long major_gc;
#ifdef BM_TIMERS
-#if (defined(__i386__) || defined(__x86_64__)) && USE_PERFCTR
-
-#include "libperfctr.h"
-struct vperfctr *system_clock;
-double cpu_khz;
-BM_NEW_TIMER(start);
-
-static double get_hrvtime(void)
-{
- unsigned long long ticks;
- double milli_seconds;
-
- ticks = vperfctr_read_tsc(system_clock);
- milli_seconds = (double)ticks / cpu_khz;
- return milli_seconds;
-}
-
-static void stop_hrvtime(void)
-{
- if(system_clock)
- {
- vperfctr_stop(system_clock);
- vperfctr_close(system_clock);
- system_clock = NULL;
- }
-}
-
-#else /* not perfctr, asuming Solaris */
+/* assuming Solaris */
#include <time.h>
BM_TIMER_T system_clock;
-#endif
unsigned long local_pause_times[MAX_PAUSE_TIME];
unsigned long pause_times[MAX_PAUSE_TIME];
@@ -117,40 +89,6 @@ unsigned long long message_sizes[1000];
void init_benchmarking()
{
#ifdef BM_TIMERS
-#if (defined(__i386__) || defined(__x86_64__)) && USE_PERFCTR
- /* pass `--with-perfctr=/path/to/perfctr' when configuring */
- struct perfctr_info info;
- struct vperfctr_control control;
- int i;
-
- system_clock = vperfctr_open();
- if (system_clock != NULL)
- {
- if (vperfctr_info(system_clock,&info) >= 0)
- {
- cpu_khz = (double)info.cpu_khz;
- if (info.cpu_features & PERFCTR_FEATURE_RDTSC)
- {
- memset(&control,0,sizeof control);
- control.cpu_control.tsc_on = 1;
- }
- }
- if (vperfctr_control(system_clock,&control) < 0)
- {
- vperfctr_close(system_clock);
- system_clock = NULL;
- }
- }
-
- for (i = 0; i < 1000; i++)
- {
- BM_START_TIMER(system);
- BM_STOP_TIMER(system);
- }
-
- timer_time = system_time / 1000;
- start_time = 0;
-#else
int i;
for (i = 0; i < 1000; i++)
{
@@ -158,7 +96,6 @@ void init_benchmarking()
BM_STOP_TIMER(system);
}
timer_time = system_time / 1000;
-#endif
for (i = 0; i < MAX_PAUSE_TIME; i++) {
local_pause_times[i] = 0;
diff --git a/erts/emulator/beam/benchmark.h b/erts/emulator/beam/benchmark.h
index 766edaac42..904564a96b 100644
--- a/erts/emulator/beam/benchmark.h
+++ b/erts/emulator/beam/benchmark.h
@@ -37,10 +37,7 @@
/* BM_TIMERS keeps track of the time spent in diferent parts of the
* system. It only measures accual active time, not time spent in idle
- * mode. These timers requires hardware support. For Linux, use the
- * package perfctr from user.it.uu.se/~mikpe/linux/perfctr. If this
- * package is not specified when configuring the system
- * (--with-perfctr=PATH), the Solaris hrtime_t will be used.
+ * mode. Currently, the Solaris hrtime_t will be used.
* To add new timers look below.
*/
#define BM_TIMERS
@@ -142,38 +139,7 @@ extern unsigned long long major_gc;
* meassure (send time in shared heap for instance).
*/
-#if (defined(__i386__) || defined(__x86_64__)) && USE_PERFCTR
-#include "libperfctr.h"
-
-#define BM_TIMER_T double
-
-extern struct vperfctr *system_clock;
-extern double cpu_khz;
-extern BM_TIMER_T start_time;
-
-#define BM_START_TIMER(t) start_time = \
- (BM_TIMER_T)vperfctr_read_tsc(system_clock) / \
- cpu_khz;
-
-#define BM_STOP_TIMER(t) do { \
- BM_TIMER_T tmp = ((BM_TIMER_T)vperfctr_read_tsc(system_clock) / cpu_khz); \
- tmp -= (start_time + timer_time); \
- t##_time += (tmp > 0 ? tmp : 0); \
-} while(0)
-
-#define BM_TIME_PRINTER(str,time) do { \
- int min,sec,milli,micro; \
- BM_TIMER_T tmp = (time) * 1000; \
- micro = (uint)(tmp - ((int)(tmp / 1000)) * 1000); \
- tmp /= 1000; \
- milli = (uint)(tmp - ((int)(tmp / 1000)) * 1000); \
- tmp /= 1000; \
- sec = (uint)(tmp - ((int)(tmp / 60)) * 60); \
- min = (uint)tmp / 60; \
- erts_fprintf(file,str": %d:%02d.%03d %03d\n",min,sec,milli,micro); \
-} while(0)
-
-#else /* !USE_PERFCTR (Assuming Solaris) */
+/* (Assuming Solaris) */
#define BM_TIMER_T hrtime_t
#define BM_START_TIMER(t) system_clock = sys_gethrtime()
@@ -196,7 +162,6 @@ extern BM_TIMER_T start_time;
} while(0)
extern BM_TIMER_T system_clock;
-#endif /* USE_PERFCTR */
extern BM_TIMER_T timer_time;
extern BM_TIMER_T system_time;
diff --git a/erts/emulator/beam/bif.tab b/erts/emulator/beam/bif.tab
index aea3224342..1d0d214e77 100644
--- a/erts/emulator/beam/bif.tab
+++ b/erts/emulator/beam/bif.tab
@@ -194,6 +194,7 @@ bif math:erf/1
bif math:erfc/1
bif math:exp/1
bif math:log/1
+bif math:log2/1
bif math:log10/1
bif math:sqrt/1
bif math:atan2/2
diff --git a/erts/emulator/beam/erl_driver.h b/erts/emulator/beam/erl_driver.h
index f9938fc66c..e498ac70ec 100644
--- a/erts/emulator/beam/erl_driver.h
+++ b/erts/emulator/beam/erl_driver.h
@@ -133,7 +133,7 @@ typedef struct {
#define ERL_DRV_EXTENDED_MARKER (0xfeeeeeed)
#define ERL_DRV_EXTENDED_MAJOR_VERSION 3
-#define ERL_DRV_EXTENDED_MINOR_VERSION 1
+#define ERL_DRV_EXTENDED_MINOR_VERSION 2
/*
* The emulator will refuse to load a driver with a major version
@@ -361,6 +361,9 @@ typedef struct erl_drv_entry {
/* Called on behalf of driver_select when
it is safe to release 'event'. A typical
unix driver would call close(event) */
+ void (*emergency_close)(ErlDrvData drv_data);
+ /* called when the port is closed abruptly.
+ specifically when erl_crash_dump is called. */
/* When adding entries here, dont forget to pad in obsolete/driver.h */
} ErlDrvEntry;
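
The new emergency_close member is appended at the end of ErlDrvEntry, and the driver API minor version is bumped from 1 to 2 so the emulator can tell whether a loaded driver actually provides the field. A hedged sketch of how a driver could opt in; the driver name and callback bodies are hypothetical, only the ErlDrvEntry members and version macros come from erl_driver.h:

    #include "erl_driver.h"

    static ErlDrvData my_start(ErlDrvPort port, char *command)
    {
        (void)command;
        return (ErlDrvData)port;
    }

    static void my_stop(ErlDrvData data)
    {
        (void)data;
    }

    /* Only called on abrupt shutdown, e.g. while erl_crash_dump runs;
     * a socket driver would close its file descriptor here. */
    static void my_emergency_close(ErlDrvData data)
    {
        (void)data;
    }

    static ErlDrvEntry my_driver_entry = {
        .driver_name     = "my_drv",                        /* hypothetical */
        .start           = my_start,
        .stop            = my_stop,
        .extended_marker = ERL_DRV_EXTENDED_MARKER,
        .major_version   = ERL_DRV_EXTENDED_MAJOR_VERSION,
        .minor_version   = ERL_DRV_EXTENDED_MINOR_VERSION,  /* 2 after this change */
        .emergency_close = my_emergency_close,
    };

    DRIVER_INIT(my_drv)
    {
        return &my_driver_entry;
    }

Drivers built against the older 3.1 header simply lack the trailing member; the io.c hunks further down copy it into the internal driver struct only when the reported version is at least 3.2.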
diff --git a/erts/emulator/beam/erl_math.c b/erts/emulator/beam/erl_math.c
index 16d4fdc09c..9b864628db 100644
--- a/erts/emulator/beam/erl_math.c
+++ b/erts/emulator/beam/erl_math.c
@@ -207,6 +207,24 @@ BIF_RETTYPE math_log_1(BIF_ALIST_1)
return math_call_1(BIF_P, log, BIF_ARG_1);
}
+#ifdef HAVE_LOG2
+static double
+log2_wrapper(double x)
+{
+ return log2(x);
+}
+#else
+static double
+log2_wrapper(double x)
+{
+ return log(x) / 0.6931471805599453; /* log(2.0); */
+}
+#endif
+
+BIF_RETTYPE math_log2_1(BIF_ALIST_1)
+{
+ return math_call_1(BIF_P, log2_wrapper, BIF_ARG_1);
+}
BIF_RETTYPE math_log10_1(BIF_ALIST_1)
{
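
configure.in now probes for log2() with AC_CHECK_FUNCS, which defines HAVE_LOG2 when the libm function exists; otherwise the wrapper above falls back to log(x) divided by the precomputed constant ln 2, and bif.tab exposes the result as the new math:log2/1 BIF. A quick standalone check of that identity (illustration only, not ERTS code):

    #include <math.h>
    #include <stdio.h>

    int main(void)
    {
        const double ln2 = 0.6931471805599453;   /* same constant as log2_wrapper */
        const double xs[] = { 0.5, 1.0, 2.0, 8.0, 1024.0, 3.14159 };

        for (unsigned i = 0; i < sizeof xs / sizeof xs[0]; i++) {
            double fallback = log(xs[i]) / ln2;  /* what the #else branch computes */
            printf("x=%-8g fallback=%.15g libm log2=%.15g\n",
                   xs[i], fallback, log2(xs[i]));
        }
        return 0;
    }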
diff --git a/erts/emulator/beam/global.h b/erts/emulator/beam/global.h
index ec8c1e3ccb..5330f389e0 100644
--- a/erts/emulator/beam/global.h
+++ b/erts/emulator/beam/global.h
@@ -161,6 +161,7 @@ struct erts_driver_t_ {
void (*ready_async)(ErlDrvData drv_data, ErlDrvThreadData thread_data); /* Might be NULL */
void (*process_exit)(ErlDrvData drv_data, ErlDrvMonitor *monitor);
void (*stop_select)(ErlDrvEvent event, void*); /* Might be NULL */
+ void (*emergency_close)(ErlDrvData drv_data); /* Might be NULL */
};
extern erts_driver_t *driver_list;
@@ -883,6 +884,7 @@ Uint erts_port_ioq_size(Port *pp);
void erts_stale_drv_select(Eterm, ErlDrvPort, ErlDrvEvent, int, int);
Port *erts_get_heart_port(void);
+void erts_emergency_close_ports(void);
#if defined(ERTS_SMP) && defined(ERTS_ENABLE_LOCK_COUNT)
void erts_lcnt_enable_io_lock_count(int enable);
diff --git a/erts/emulator/beam/io.c b/erts/emulator/beam/io.c
index d46ac9b336..dc4c6fc350 100644
--- a/erts/emulator/beam/io.c
+++ b/erts/emulator/beam/io.c
@@ -7342,6 +7342,8 @@ no_stop_select_callback(ErlDrvEvent event, void* private)
erts_send_error_to_logger_nogl(dsbufp);
}
+#define IS_DRIVER_VERSION_GE(DE,MAJOR,MINOR) \
+ ((DE)->major_version >= (MAJOR) && (DE)->minor_version >= (MINOR))
static int
init_driver(erts_driver_t *drv, ErlDrvEntry *de, DE_Handle *handle)
@@ -7389,6 +7391,7 @@ init_driver(erts_driver_t *drv, ErlDrvEntry *de, DE_Handle *handle)
drv->timeout = de->timeout ? de->timeout : no_timeout_callback;
drv->ready_async = de->ready_async;
drv->process_exit = de->process_exit;
+ drv->emergency_close = IS_DRIVER_VERSION_GE(de,3,2) ? de->emergency_close : NULL;
if (de->stop_select)
drv->stop_select = de->stop_select;
else
@@ -7407,6 +7410,8 @@ init_driver(erts_driver_t *drv, ErlDrvEntry *de, DE_Handle *handle)
}
}
+#undef IS_DRIVER_VERSION_GE
+
void
erts_destroy_driver(erts_driver_t *drv)
{
@@ -7550,7 +7555,7 @@ Port *erts_get_heart_port(void)
if (!port)
continue;
/* only examine undead or alive ports */
- if (erts_atomic32_read_nob(&port->state) & ERTS_PORT_SFLGS_DEAD)
+ if (erts_atomic32_read_nob(&port->state) & ERTS_PORT_SFLGS_INVALID_DRIVER_LOOKUP)
continue;
/* immediate atom compare */
reg = port->common.u.alive.reg;
@@ -7561,3 +7566,23 @@ Port *erts_get_heart_port(void)
return NULL;
}
+
+void erts_emergency_close_ports(void)
+{
+ int ix, max = erts_ptab_max(&erts_port);
+
+ for (ix = 0; ix < max; ix++) {
+ Port *port = erts_pix2port(ix);
+
+ if (!port)
+ continue;
+ /* only examine undead or alive ports */
+ if (erts_atomic32_read_nob(&port->state) & ERTS_PORT_SFLGS_INVALID_DRIVER_LOOKUP)
+ continue;
+
+ /* emergency close socket */
+ if (port->drv_ptr->emergency_close) {
+ port->drv_ptr->emergency_close((ErlDrvData) port->drv_data);
+ }
+ }
+}
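
These io.c hunks switch the heart-port scan to ERTS_PORT_SFLGS_INVALID_DRIVER_LOOKUP, add erts_emergency_close_ports() to walk the port table and invoke emergency_close where a driver provides it, and gate the callback on IS_DRIVER_VERSION_GE(de,3,2) in init_driver(), since an entry compiled against an older erl_driver.h does not contain the new trailing member. The gating idea reduced to a standalone sketch with an invented struct:

    #include <stdio.h>

    typedef void (*emergency_cb)(void);

    /* Hypothetical driver entry; on_emergency only exists from API 3.2 on. */
    struct fake_entry {
        int major_version;
        int minor_version;
        emergency_cb on_emergency;
    };

    static void close_sockets(void) { puts("emergency close"); }

    /* Mirror of the IS_DRIVER_VERSION_GE(de,3,2) gate in io.c: read the
     * trailing member only when the producer claims it is present. */
    static emergency_cb pick_emergency(const struct fake_entry *e)
    {
        return (e->major_version >= 3 && e->minor_version >= 2)
                   ? e->on_emergency
                   : NULL;
    }

    int main(void)
    {
        struct fake_entry drv = { 3, 2, close_sockets };
        emergency_cb cb = pick_emergency(&drv);
        if (cb)
            cb();
        return 0;
    }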
diff --git a/erts/emulator/drivers/common/inet_drv.c b/erts/emulator/drivers/common/inet_drv.c
index c8b69b0a94..5196eb51c6 100644
--- a/erts/emulator/drivers/common/inet_drv.c
+++ b/erts/emulator/drivers/common/inet_drv.c
@@ -268,14 +268,13 @@ static BOOL (WINAPI *fpSetHandleInformation)(HANDLE,DWORD,DWORD);
#define sock_htonl(x) htonl((x))
#define sock_send(s,buf,len,flag) send((s),(buf),(len),(flag))
#define sock_sendv(s, vec, size, np, flag) \
- WSASend((s),(WSABUF*)(vec),\
- (size),(np),(flag),NULL,NULL)
+ WSASend((s),(WSABUF*)(vec),(size),(np),(flag),NULL,NULL)
#define sock_recv(s,buf,len,flag) recv((s),(buf),(len),(flag))
#define sock_recvfrom(s,buf,blen,flag,addr,alen) \
- recvfrom((s),(buf),(blen),(flag),(addr),(alen))
+ recvfrom((s),(buf),(blen),(flag),(addr),(alen))
#define sock_sendto(s,buf,blen,flag,addr,alen) \
- sendto((s),(buf),(blen),(flag),(addr),(alen))
+ sendto((s),(buf),(blen),(flag),(addr),(alen))
#define sock_hostname(buf, len) gethostname((buf), (len))
#define sock_getservbyname(name,proto) getservbyname((name),(proto))
@@ -360,9 +359,9 @@ static ssize_t writev_fallback(int fd, const struct iovec *iov, int iovcnt, int
#define sock_accept(s, addr, len) accept((s), (addr), (len))
#define sock_send(s,buf,len,flag) inet_send((s),(buf),(len),(flag))
#define sock_sendto(s,buf,blen,flag,addr,alen) \
- sendto((s),(buf),(blen),(flag),(addr),(alen))
+ sendto((s),(buf),(blen),(flag),(addr),(alen))
#define sock_sendv(s, vec, size, np, flag) \
- (*(np) = writev_fallback((s), (struct iovec*)(vec), (size), (*(np))))
+ (*(np) = writev_fallback((s), (struct iovec*)(vec), (size), (*(np))))
#define sock_sendmsg(s,msghdr,flag) sendmsg((s),(msghdr),(flag))
#define sock_open(af, type, proto) socket((af), (type), (proto))
@@ -1178,6 +1177,7 @@ static ErlDrvSSizeT tcp_inet_ctl(ErlDrvData, unsigned int,
static void tcp_inet_timeout(ErlDrvData);
static void tcp_inet_process_exit(ErlDrvData, ErlDrvMonitor *);
static void inet_stop_select(ErlDrvEvent, void*);
+static void inet_emergency_close(ErlDrvData);
#ifdef __WIN32__
static void tcp_inet_event(ErlDrvData, ErlDrvEvent);
static void find_dynamic_functions(void);
@@ -1288,7 +1288,8 @@ static struct erl_drv_entry tcp_inet_driver_entry =
ERL_DRV_FLAG_USE_PORT_LOCKING|ERL_DRV_FLAG_SOFT_BUSY,
NULL,
tcp_inet_process_exit,
- inet_stop_select
+ inet_stop_select,
+ inet_emergency_close
};
@@ -1341,7 +1342,8 @@ static struct erl_drv_entry udp_inet_driver_entry =
ERL_DRV_FLAG_USE_PORT_LOCKING,
NULL,
NULL,
- inet_stop_select
+ inet_stop_select,
+ inet_emergency_close
};
#endif
@@ -1375,7 +1377,8 @@ static struct erl_drv_entry sctp_inet_driver_entry =
ERL_DRV_FLAG_USE_PORT_LOCKING,
NULL,
NULL, /* process_exit */
- inet_stop_select
+ inet_stop_select,
+ inet_emergency_close
};
#endif
@@ -1421,7 +1424,7 @@ static int packet_inet_input(udp_descriptor* udesc, HANDLE event);
static int packet_inet_output(udp_descriptor* udesc, HANDLE event);
#endif
-/* convert descriptor poiner to inet_descriptor pointer */
+/* convert descriptor pointer to inet_descriptor pointer */
#define INETP(d) (&(d)->inet)
#ifdef __OSE__
@@ -2890,6 +2893,9 @@ static ErlDrvTermData am_sctp_rtoinfo, /* Option names */
/* For #sctp_paddrinfo{}: */
am_active, am_inactive,
+# if HAVE_DECL_SCTP_UNCONFIRMED
+ am_unconfirmed,
+# endif
/* For #sctp_status{}: */
# if HAVE_DECL_SCTP_EMPTY
@@ -3919,7 +3925,10 @@ static void inet_init_sctp(void) {
/* For #sctp_paddrinfo{}: */
INIT_ATOM(active);
INIT_ATOM(inactive);
-
+# if HAVE_DECL_SCTP_UNCONFIRMED
+ INIT_ATOM(unconfirmed);
+# endif
+
/* For #sctp_status{}: */
# if HAVE_DECL_SCTP_EMPTY
INIT_ATOM(empty);
@@ -4721,6 +4730,36 @@ static char* sockaddr_to_buf(struct sockaddr* addr, char* ptr, char* end)
return NULL;
}
+/* sockaddr_bufsz_need
+ * Returns the number of bytes needed to store the information
+ * written by sockaddr_to_buf
+ */
+
+static size_t sockaddr_bufsz_need(struct sockaddr* addr)
+{
+ if (addr->sa_family == AF_INET || addr->sa_family == 0) {
+ return 1 + sizeof(struct in_addr);
+ }
+#if defined(HAVE_IN6) && defined(AF_INET6)
+ else if (addr->sa_family == AF_INET6) {
+ return 1 + sizeof(struct in6_addr);
+ }
+#endif
+#if defined(AF_LINK)
+ if (addr->sa_family == AF_LINK) {
+ struct sockaddr_dl *sdl_p = (struct sockaddr_dl*) addr;
+ return 2 + sdl_p->sdl_alen;
+ }
+#endif
+#if defined(AF_PACKET) && defined(HAVE_NETPACKET_PACKET_H)
+ else if(addr->sa_family == AF_PACKET) {
+ struct sockaddr_ll *sll_p = (struct sockaddr_ll*) addr;
+ return 2 + sll_p->sll_halen;
+ }
+#endif
+ return 0;
+}
+
static char* buf_to_sockaddr(char* ptr, char* end, struct sockaddr* addr)
{
buf_check(ptr,end,1);
@@ -5799,6 +5838,11 @@ done:
}
#elif defined(HAVE_GETIFADDRS)
+#ifdef DEBUG
+#define GETIFADDRS_BUFSZ (1)
+#else
+#define GETIFADDRS_BUFSZ (512)
+#endif
static ErlDrvSSizeT inet_ctl_getifaddrs(inet_descriptor* desc_p,
char **rbuf_pp, ErlDrvSizeT rsize)
@@ -5809,15 +5853,15 @@ static ErlDrvSSizeT inet_ctl_getifaddrs(inet_descriptor* desc_p,
char *buf_p;
char *buf_alloc_p;
- buf_size = 512;
- buf_alloc_p = ALLOC(buf_size);
+ buf_size = GETIFADDRS_BUFSZ;
+ buf_alloc_p = ALLOC(GETIFADDRS_BUFSZ);
buf_p = buf_alloc_p;
# define BUF_ENSURE(Size) \
do { \
int NEED_, GOT_ = buf_p - buf_alloc_p; \
NEED_ = GOT_ + (Size); \
if (NEED_ > buf_size) { \
- buf_size = NEED_ + 512; \
+ buf_size = NEED_ + GETIFADDRS_BUFSZ; \
buf_alloc_p = REALLOC(buf_alloc_p, buf_size); \
buf_p = buf_alloc_p + GOT_; \
} \
@@ -5830,7 +5874,7 @@ static ErlDrvSSizeT inet_ctl_getifaddrs(inet_descriptor* desc_p,
while (! (P_ = sockaddr_to_buf((sa), buf_p, \
buf_alloc_p+buf_size))) { \
int GOT_ = buf_p - buf_alloc_p; \
- buf_size += 512; \
+ buf_size += GETIFADDRS_BUFSZ; \
buf_alloc_p = REALLOC(buf_alloc_p, buf_size); \
buf_p = buf_alloc_p + GOT_; \
} \
@@ -5887,10 +5931,11 @@ static ErlDrvSSizeT inet_ctl_getifaddrs(inet_descriptor* desc_p,
|| ifa_p->ifa_addr->sa_family == AF_PACKET
#endif
) {
- char *bp = buf_p;
- BUF_ENSURE(1);
- SOCKADDR_TO_BUF(INET_IFOPT_HWADDR, ifa_p->ifa_addr);
- if (buf_p - bp < 4) buf_p = bp; /* Empty hwaddr */
+ size_t need = sockaddr_bufsz_need(ifa_p->ifa_addr);
+ if (need > 3) {
+ BUF_ENSURE(1 + need);
+ SOCKADDR_TO_BUF(INET_IFOPT_HWADDR, ifa_p->ifa_addr);
+ }
}
#endif
}
@@ -5905,6 +5950,7 @@ static ErlDrvSSizeT inet_ctl_getifaddrs(inet_descriptor* desc_p,
return buf_size;
# undef BUF_ENSURE
}
+#undef GETIFADDRS_BUFSZ
#else
@@ -7338,8 +7384,13 @@ static int load_paddrinfo (ErlDrvTermData * spec, int i,
case SCTP_INACTIVE:
i = LOAD_ATOM (spec, i, am_inactive);
break;
+# if HAVE_DECL_SCTP_UNCONFIRMED
+ case SCTP_UNCONFIRMED:
+ i = LOAD_ATOM (spec, i, am_unconfirmed);
+ break;
+# endif
default:
- ASSERT(0); /* NB: SCTP_UNCONFIRMED modifier not yet supported */
+ i = LOAD_ATOM (spec, i, am_undefined);
}
i = LOAD_INT (spec, i, pai->spinfo_cwnd);
i = LOAD_INT (spec, i, pai->spinfo_srtt);
@@ -8204,6 +8255,19 @@ static void inet_stop(inet_descriptor* desc)
FREE(desc);
}
+static void inet_emergency_close(ErlDrvData data)
+{
+ /* valid for any (UDP, TCP or SCTP) descriptor */
+ tcp_descriptor* tcp_desc = (tcp_descriptor*)data;
+ inet_descriptor* desc = INETP(tcp_desc);
+ DEBUGF(("inet_emergency_close(%ld) {s=%d\r\n",
+ (long)desc->port, desc->s));
+ if (desc->s != INVALID_SOCKET) {
+ sock_close(desc->s);
+ }
+}
+
+
static void set_default_msgq_limits(ErlDrvPort port)
{
ErlDrvSizeT q_high = INET_HIGH_MSGQ_WATERMARK;
diff --git a/erts/emulator/hipe/hipe_amd64.c b/erts/emulator/hipe/hipe_amd64.c
index 16c597e7b4..63646825b2 100644
--- a/erts/emulator/hipe/hipe_amd64.c
+++ b/erts/emulator/hipe/hipe_amd64.c
@@ -125,7 +125,7 @@ static void atexit_alloc_code_stats(void)
#define MAP_ANONYMOUS MAP_ANON
#endif
-static void morecore(unsigned int alloc_bytes)
+static int morecore(unsigned int alloc_bytes)
{
unsigned int map_bytes;
char *map_hint, *map_start;
@@ -174,10 +174,9 @@ static void morecore(unsigned int alloc_bytes)
abort();
}
#endif
- if (map_start == MAP_FAILED) {
- perror("mmap");
- abort();
- }
+ if (map_start == MAP_FAILED)
+ return -1;
+
ALLOC_CODE_STATS(total_mapped += map_bytes);
/* Merge adjacent mappings, so the trailing portion of the previous
@@ -197,6 +196,8 @@ static void morecore(unsigned int alloc_bytes)
}
ALLOC_CODE_STATS(atexit_alloc_code_stats());
+
+ return 0;
}
static void *alloc_code(unsigned int alloc_bytes)
@@ -206,8 +207,8 @@ static void *alloc_code(unsigned int alloc_bytes)
/* Align function entries. */
alloc_bytes = (alloc_bytes + 3) & ~3;
- if (code_bytes < alloc_bytes)
- morecore(alloc_bytes);
+ if (code_bytes < alloc_bytes && morecore(alloc_bytes) != 0)
+ return NULL;
ALLOC_CODE_STATS(++nr_allocs);
ALLOC_CODE_STATS(total_alloc += alloc_bytes);
res = code_next;
@@ -224,7 +225,6 @@ void *hipe_alloc_code(Uint nrbytes, Eterm callees, Eterm *trampolines, Process *
return alloc_code(nrbytes);
}
-
/* Make stub for native code calling exported beam function.
*/
void *hipe_make_native_stub(void *callee_exp, unsigned int beamArity)
@@ -253,6 +253,8 @@ void *hipe_make_native_stub(void *callee_exp, unsigned int beamArity)
((P_CALLEE_EXP + 4) >= 128 ? 3 : 0) +
(P_ARITY >= 128 ? 3 : 0);
codep = code = alloc_code(codeSize);
+ if (!code)
+ return NULL;
/* movl $callee_exp, P_CALLEE_EXP(%ebp); 3 or 6 bytes, plus 4 */
codep[0] = 0xc7;
diff --git a/erts/emulator/hipe/hipe_arch.h b/erts/emulator/hipe/hipe_arch.h
index 04ed980126..b45209b3f7 100644
--- a/erts/emulator/hipe/hipe_arch.h
+++ b/erts/emulator/hipe/hipe_arch.h
@@ -29,6 +29,7 @@ extern void hipe_patch_load_fe(Uint *address, Uint value);
extern int hipe_patch_insn(void *address, Uint value, Eterm type);
extern int hipe_patch_call(void *callAddress, void *destAddress, void *trampoline);
+extern void *hipe_alloc_code(Uint nrbytes, Eterm callees, Eterm *trampolines, Process *p);
extern void *hipe_make_native_stub(void *beamAddress, unsigned int beamArity);
#if defined(__sparc__)
diff --git a/erts/emulator/hipe/hipe_arm.c b/erts/emulator/hipe/hipe_arm.c
index 165eb543c8..c0c6305c68 100644
--- a/erts/emulator/hipe/hipe_arm.c
+++ b/erts/emulator/hipe/hipe_arm.c
@@ -283,6 +283,8 @@ void *hipe_make_native_stub(void *callee_exp, unsigned int beamArity)
*/
code = alloc_stub(4, &tramp_callemu);
+ if (!code)
+ return NULL;
callemu_offset = ((int)&nbif_callemu - ((int)&code[2] + 8)) >> 2;
if (!(callemu_offset >= -0x00800000 && callemu_offset <= 0x007FFFFF)) {
callemu_offset = ((int)tramp_callemu - ((int)&code[2] + 8)) >> 2;
diff --git a/erts/emulator/hipe/hipe_arm.h b/erts/emulator/hipe/hipe_arm.h
index 19f2a986cf..b9cd1a750c 100644
--- a/erts/emulator/hipe/hipe_arm.h
+++ b/erts/emulator/hipe/hipe_arm.h
@@ -40,8 +40,4 @@ static __inline__ int hipe_word32_address_ok(void *address)
extern void hipe_arm_inc_stack(void);
-/* for hipe_bifs_enter_code_2 */
-extern void *hipe_alloc_code(Uint nrbytes, Eterm callees, Eterm *trampolines, Process *p);
-#define HIPE_ALLOC_CODE(n,c,t,p) hipe_alloc_code((n),(c),(t),(p))
-
#endif /* HIPE_ARM_H */
diff --git a/erts/emulator/hipe/hipe_arm_bifs.m4 b/erts/emulator/hipe/hipe_arm_bifs.m4
index 57e51bb8b1..884240be9c 100644
--- a/erts/emulator/hipe/hipe_arm_bifs.m4
+++ b/erts/emulator/hipe/hipe_arm_bifs.m4
@@ -26,6 +26,7 @@ include(`hipe/hipe_arm_asm.m4')
.text
.p2align 2
+ .arm
`#if defined(ERTS_ENABLE_LOCK_CHECK) && defined(ERTS_SMP)
# define CALL_BIF(F) ldr r14, =F; str r14, [r0, #P_BIF_CALLEE]; bl hipe_debug_bif_wrapper
@@ -392,7 +393,14 @@ $1:
mov r0, P
/* Perform a quick save;call;restore;ret sequence. */
+#ifdef __thumb__
+ SAVE_CONTEXT_QUICK
+ bl $2
+ RESTORE_CONTEXT_QUICK
+ NBIF_RET(0)
+#else
QUICK_CALL_RET($2,0)
+#endif
.size $1, .-$1
.type $1, %function
#endif')
@@ -408,7 +416,14 @@ $1:
NBIF_ARG(r1,1,0)
/* Perform a quick save;call;restore;ret sequence. */
+#ifdef __thumb__
+ SAVE_CONTEXT_QUICK
+ bl $2
+ RESTORE_CONTEXT_QUICK
+ NBIF_RET(1)
+#else
QUICK_CALL_RET($2,1)
+#endif
.size $1, .-$1
.type $1, %function
#endif')
@@ -425,7 +440,14 @@ $1:
NBIF_ARG(r2,2,1)
/* Perform a quick save;call;restore;ret sequence. */
+#ifdef __thumb__
+ SAVE_CONTEXT_QUICK
+ bl $2
+ RESTORE_CONTEXT_QUICK
+ NBIF_RET(2)
+#else
QUICK_CALL_RET($2,2)
+#endif
.size $1, .-$1
.type $1, %function
#endif')
@@ -443,7 +465,14 @@ $1:
NBIF_ARG(r3,3,2)
/* Perform a quick save;call;restore;ret sequence. */
+#ifdef __thumb__
+ SAVE_CONTEXT_QUICK
+ bl $2
+ RESTORE_CONTEXT_QUICK
+ NBIF_RET(3)
+#else
QUICK_CALL_RET($2,3)
+#endif
.size $1, .-$1
.type $1, %function
#endif')
@@ -467,7 +496,14 @@ $1:
NBIF_ARG(r3,5,2)
/* Perform a quick save;call;restore;ret sequence. */
+#ifdef __thumb__
+ SAVE_CONTEXT_QUICK
+ bl $2
+ RESTORE_CONTEXT_QUICK
+ NBIF_RET(5)
+#else
QUICK_CALL_RET($2,5)
+#endif
.size $1, .-$1
.type $1, %function
#endif')
@@ -489,9 +525,16 @@ define(noproc_primop_interface_0,
#`define' HAVE_$1
.global $1
$1:
- /* XXX: this case is always trivial; how to suppress the branch? */
/* Perform a quick save;call;restore;ret sequence. */
+#ifdef __thumb__
+ SAVE_CONTEXT_QUICK
+ bl $2
+ RESTORE_CONTEXT_QUICK
+ NBIF_RET(0)
+#else
+ /* XXX: this case is always trivial; how to suppress the branch? */
QUICK_CALL_RET($2,0)
+#endif
.size $1, .-$1
.type $1, %function
#endif')
@@ -506,7 +549,14 @@ $1:
NBIF_ARG(r0,1,0)
/* Perform a quick save;call;restore;ret sequence. */
+#ifdef __thumb__
+ SAVE_CONTEXT_QUICK
+ bl $2
+ RESTORE_CONTEXT_QUICK
+ NBIF_RET(1)
+#else
QUICK_CALL_RET($2,1)
+#endif
.size $1, .-$1
.type $1, %function
#endif')
@@ -522,7 +572,14 @@ $1:
NBIF_ARG(r1,2,1)
/* Perform a quick save;call;restore;ret sequence. */
+#ifdef __thumb__
+ SAVE_CONTEXT_QUICK
+ bl $2
+ RESTORE_CONTEXT_QUICK
+ NBIF_RET(2)
+#else
QUICK_CALL_RET($2,2)
+#endif
.size $1, .-$1
.type $1, %function
#endif')
@@ -539,7 +596,14 @@ $1:
NBIF_ARG(r2,3,2)
/* Perform a quick save;call;restore;ret sequence. */
+#ifdef __thumb__
+ SAVE_CONTEXT_QUICK
+ bl $2
+ RESTORE_CONTEXT_QUICK
+ NBIF_RET(3)
+#else
QUICK_CALL_RET($2,3)
+#endif
.size $1, .-$1
.type $1, %function
#endif')
@@ -559,7 +623,14 @@ $1:
str r4, [sp, #0]
/* Perform a quick save;call;restore;ret sequence. */
+#ifdef __thumb__
+ SAVE_CONTEXT_QUICK
+ bl $2
+ RESTORE_CONTEXT_QUICK
+ NBIF_RET(5)
+#else
QUICK_CALL_RET($2,5)
+#endif
.size $1, .-$1
.type $1, %function
#endif')
diff --git a/erts/emulator/hipe/hipe_arm_glue.S b/erts/emulator/hipe/hipe_arm_glue.S
index 069cb4512e..e7ff267606 100644
--- a/erts/emulator/hipe/hipe_arm_glue.S
+++ b/erts/emulator/hipe/hipe_arm_glue.S
@@ -24,6 +24,7 @@
.text
.p2align 2
+ .arm
/*
* Enter Erlang from C.
@@ -69,6 +70,7 @@
* Emulated code recursively calls native code.
*/
.global hipe_arm_call_to_native
+ .type hipe_arm_call_to_native, %function
hipe_arm_call_to_native:
ENTER_FROM_C
/* get argument registers */
@@ -84,6 +86,7 @@ hipe_arm_call_to_native:
* This is where native code returns to emulated code.
*/
.global nbif_return
+ .type nbif_return, %function
nbif_return:
str r0, [P, #P_ARG0] /* save retval */
mov r0, #HIPE_MODE_SWITCH_RES_RETURN
@@ -94,6 +97,7 @@ nbif_return:
* Emulated code returns to its native code caller.
*/
.global hipe_arm_return_to_native
+ .type hipe_arm_return_to_native, %function
hipe_arm_return_to_native:
ENTER_FROM_C
/* get return value */
@@ -110,6 +114,7 @@ hipe_arm_return_to_native:
* Emulated code tailcalls native code.
*/
.global hipe_arm_tailcall_to_native
+ .type hipe_arm_tailcall_to_native, %function
hipe_arm_tailcall_to_native:
ENTER_FROM_C
/* get argument registers */
@@ -124,6 +129,7 @@ hipe_arm_tailcall_to_native:
* Emulated code throws an exception to its native code caller.
*/
.global hipe_arm_throw_to_native
+ .type hipe_arm_throw_to_native, %function
hipe_arm_throw_to_native:
ENTER_FROM_C
/* invoke the handler */
@@ -141,6 +147,7 @@ hipe_arm_throw_to_native:
* XXX: Different stubs for different number of register parameters?
*/
.global nbif_callemu
+ .type nbif_callemu, %function
nbif_callemu:
str r8, [P, #P_CALLEE_EXP]
str r0, [P, #P_ARITY]
@@ -152,6 +159,7 @@ nbif_callemu:
* nbif_apply
*/
.global nbif_apply
+ .type nbif_apply, %function
nbif_apply:
STORE_ARG_REGS
mov r0, #HIPE_MODE_SWITCH_RES_APPLY
@@ -168,6 +176,7 @@ nbif_apply:
*/
#if NR_ARG_REGS >= 6
.global nbif_ccallemu6
+ .type nbif_ccallemu6, %function
nbif_ccallemu6:
str ARG5, [P, #P_ARG5]
#if NR_ARG_REGS > 6
@@ -180,6 +189,7 @@ nbif_ccallemu6:
#if NR_ARG_REGS >= 5
.global nbif_ccallemu5
+ .type nbif_ccallemu5, %function
nbif_ccallemu5:
str ARG4, [P, #P_ARG4]
#if NR_ARG_REGS > 5
@@ -192,6 +202,7 @@ nbif_ccallemu5:
#if NR_ARG_REGS >= 4
.global nbif_ccallemu4
+ .type nbif_ccallemu4, %function
nbif_ccallemu4:
str ARG3, [P, #P_ARG3]
#if NR_ARG_REGS > 4
@@ -204,6 +215,7 @@ nbif_ccallemu4:
#if NR_ARG_REGS >= 3
.global nbif_ccallemu3
+ .type nbif_ccallemu3, %function
nbif_ccallemu3:
str ARG2, [P, #P_ARG2]
#if NR_ARG_REGS > 3
@@ -216,6 +228,7 @@ nbif_ccallemu3:
#if NR_ARG_REGS >= 2
.global nbif_ccallemu2
+ .type nbif_ccallemu2, %function
nbif_ccallemu2:
str ARG1, [P, #P_ARG1]
#if NR_ARG_REGS > 2
@@ -228,6 +241,7 @@ nbif_ccallemu2:
#if NR_ARG_REGS >= 1
.global nbif_ccallemu1
+ .type nbif_ccallemu1, %function
nbif_ccallemu1:
str ARG0, [P, #P_ARG0]
#if NR_ARG_REGS > 1
@@ -239,6 +253,7 @@ nbif_ccallemu1:
#endif
.global nbif_ccallemu0
+ .type nbif_ccallemu0, %function
nbif_ccallemu0:
/* We use r1 not ARG0 here because ARG0 is not
defined when NR_ARG_REGS == 0. */
@@ -253,6 +268,7 @@ nbif_ccallemu0:
* This is where native code suspends.
*/
.global nbif_suspend_0
+ .type nbif_suspend_0, %function
nbif_suspend_0:
mov r0, #HIPE_MODE_SWITCH_RES_SUSPEND
b .suspend_exit
@@ -261,6 +277,7 @@ nbif_suspend_0:
* Suspend from a receive (waiting for a message)
*/
.global nbif_suspend_msg
+ .type nbif_suspend_msg, %function
nbif_suspend_msg:
mov r0, #HIPE_MODE_SWITCH_RES_WAIT
b .suspend_exit
@@ -271,6 +288,7 @@ nbif_suspend_msg:
* else { return 0; }
*/
.global nbif_suspend_msg_timeout
+ .type nbif_suspend_msg_timeout, %function
nbif_suspend_msg_timeout:
ldr r1, [P, #P_FLAGS]
mov r0, #HIPE_MODE_SWITCH_RES_WAIT_TIMEOUT
@@ -285,23 +303,31 @@ nbif_suspend_msg_timeout:
* This is the default exception handler for native code.
*/
.global nbif_fail
+ .type nbif_fail, %function
nbif_fail:
mov r0, #HIPE_MODE_SWITCH_RES_THROW
b .flush_exit /* no need to save RA */
.global nbif_0_gc_after_bif
- .global nbif_1_gc_after_bif
- .global nbif_2_gc_after_bif
- .global nbif_3_gc_after_bif
+ .type nbif_0_gc_after_bif, %function
nbif_0_gc_after_bif:
mov r1, #0
b .gc_after_bif
+
+ .global nbif_1_gc_after_bif
+ .type nbif_1_gc_after_bif, %function
nbif_1_gc_after_bif:
mov r1, #1
b .gc_after_bif
+
+ .global nbif_2_gc_after_bif
+ .type nbif_2_gc_after_bif, %function
nbif_2_gc_after_bif:
mov r1, #2
b .gc_after_bif
+
+ .global nbif_3_gc_after_bif
+ .type nbif_3_gc_after_bif, %function
nbif_3_gc_after_bif:
mov r1, #3
/*FALLTHROUGH*/
@@ -329,18 +355,25 @@ nbif_3_gc_after_bif:
* TEMP_LR contains a copy of LR
*/
.global nbif_0_simple_exception
+ .type nbif_0_simple_exception, %function
nbif_0_simple_exception:
mov r1, #0
b .nbif_simple_exception
+
.global nbif_1_simple_exception
+ .type nbif_1_simple_exception, %function
nbif_1_simple_exception:
mov r1, #1
b .nbif_simple_exception
+
.global nbif_2_simple_exception
+ .type nbif_2_simple_exception, %function
nbif_2_simple_exception:
mov r1, #2
b .nbif_simple_exception
+
.global nbif_3_simple_exception
+ .type nbif_3_simple_exception, %function
nbif_3_simple_exception:
mov r1, #3
/*FALLTHROUGH*/
@@ -384,6 +417,7 @@ nbif_3_simple_exception:
* the gray/white stack boundary
*/
.global nbif_stack_trap_ra
+ .type nbif_stack_trap_ra, %function
nbif_stack_trap_ra: /* a return address, not a function */
# This only handles a single return value.
# If we have more, we need to save them in the PCB.
@@ -400,6 +434,7 @@ nbif_stack_trap_ra: /* a return address, not a function */
* Caller saved its LR in TEMP_LR (== TEMP1) before calling us.
*/
.global hipe_arm_inc_stack
+ .type hipe_arm_inc_stack, %function
hipe_arm_inc_stack:
STORE_ARG_REGS
mov TEMP_ARG0, lr
diff --git a/erts/emulator/hipe/hipe_bif0.c b/erts/emulator/hipe/hipe_bif0.c
index 9eb0b88ced..9e5830f345 100644
--- a/erts/emulator/hipe/hipe_bif0.c
+++ b/erts/emulator/hipe/hipe_bif0.c
@@ -397,15 +397,17 @@ BIF_RETTYPE hipe_bifs_enter_code_2(BIF_ALIST_2)
ASSERT(bitoffs == 0);
ASSERT(bitsize == 0);
trampolines = NIL;
-#ifdef HIPE_ALLOC_CODE
- address = HIPE_ALLOC_CODE(nrbytes, BIF_ARG_2, &trampolines, BIF_P);
- if (!address)
- BIF_ERROR(BIF_P, BADARG);
-#else
- if (is_not_nil(BIF_ARG_2))
- BIF_ERROR(BIF_P, BADARG);
- address = erts_alloc(ERTS_ALC_T_HIPE, nrbytes);
-#endif
+ address = hipe_alloc_code(nrbytes, BIF_ARG_2, &trampolines, BIF_P);
+ if (!address) {
+ Uint nrcallees;
+
+ if (is_tuple(BIF_ARG_2))
+ nrcallees = arityval(tuple_val(BIF_ARG_2)[0]);
+ else
+ nrcallees = 0;
+ erl_exit(1, "%s: failed to allocate %lu bytes and %lu trampolines\r\n",
+ __func__, (unsigned long)nrbytes, (unsigned long)nrcallees);
+ }
memcpy(address, bytes, nrbytes);
hipe_flush_icache_range(address, nrbytes);
hp = HAlloc(BIF_P, 3);
@@ -1280,6 +1282,8 @@ static void *hipe_make_stub(Eterm m, Eterm f, unsigned int arity, int is_remote)
export_entry = erts_export_get_or_make_stub(m, f, arity);
StubAddress = hipe_make_native_stub(export_entry, arity);
+ if (!StubAddress)
+ erl_exit(1, "hipe_make_stub: code allocation failed\r\n");
return StubAddress;
}
diff --git a/erts/emulator/hipe/hipe_bif0.tab b/erts/emulator/hipe/hipe_bif0.tab
index 2514b1c3a5..d715a0914b 100644
--- a/erts/emulator/hipe/hipe_bif0.tab
+++ b/erts/emulator/hipe/hipe_bif0.tab
@@ -49,7 +49,6 @@ bif hipe_bifs:constants_size/0
bif hipe_bifs:merge_term/1
bif hipe_bifs:fun_to_address/1
-#bif hipe_bifs:get_emu_address/1
bif hipe_bifs:set_native_address/3
#bif hipe_bifs:address_to_fun/1
@@ -72,7 +71,6 @@ bif hipe_bifs:term_to_word/1
bif hipe_bifs:get_fe/2
bif hipe_bifs:set_native_address_in_fe/2
-#bif hipe_bifs:make_native_stub/2
bif hipe_bifs:find_na_or_make_stub/2
bif hipe_bifs:check_crc/1
diff --git a/erts/emulator/hipe/hipe_bif1.c b/erts/emulator/hipe/hipe_bif1.c
index 56767ef04b..ecb34df412 100644
--- a/erts/emulator/hipe/hipe_bif1.c
+++ b/erts/emulator/hipe/hipe_bif1.c
@@ -574,22 +574,6 @@ BIF_RETTYPE hipe_bifs_pause_times_0(BIF_ALIST_0)
/* XXX: these macros have free variables */
#ifdef BM_TIMERS
-#if USE_PERFCTR
-#define MAKE_TIME(_timer_) { \
- BM_TIMER_T tmp = _timer_##_time; \
- milli = (uint)(tmp - ((int)(tmp / 1000)) * 1000); \
- tmp /= 1000; \
- sec = (uint)(tmp - ((int)(tmp / 60)) * 60); \
- min = (uint)tmp / 60; }
-
-#define MAKE_MICRO_TIME(_timer_) { \
- BM_TIMER_T tmp = _timer_##_time * 1000; \
- micro = (uint)(tmp - ((int)(tmp / 1000)) * 1000); \
- tmp /= 1000; \
- milli = (uint)(tmp - ((int)(tmp / 1000)) * 1000); \
- sec = (uint)tmp / 1000; }
-
-#else
#define MAKE_TIME(_timer_) { \
BM_TIMER_T tmp = _timer_##_time / 1000000; \
milli = tmp % 1000; \
@@ -604,7 +588,6 @@ BIF_RETTYPE hipe_bifs_pause_times_0(BIF_ALIST_0)
milli = tmp % 1000; \
sec = tmp / 1000; }
-#endif
#else
#define MAKE_TIME(_timer_)
#define MAKE_MICRO_TIME(_timer_)
@@ -852,9 +835,6 @@ BIF_RETTYPE hipe_bifs_misc_timer_clear_0(BIF_ALIST_0)
/*
* HiPE hrvtime().
* These implementations are currently available:
- * + On Linux with the perfctr extension we can use the process'
- * virtualised time-stamp counter. To enable this mode you must
- * pass `--with-perfctr=/path/to/perfctr' when configuring.
* + The fallback, which is the same as {X,_} = runtime(statistics).
*/
@@ -866,37 +846,6 @@ static double fallback_get_hrvtime(void)
return (double)ms_user;
}
-#if USE_PERFCTR
-
-#include "hipe_perfctr.h"
-static int hrvtime_started; /* 0: closed, +1: perfctr, -1: fallback */
-#define hrvtime_is_started() (hrvtime_started != 0)
-
-static void start_hrvtime(void)
-{
- if (hipe_perfctr_hrvtime_open() >= 0)
- hrvtime_started = 1;
- else
- hrvtime_started = -1;
-}
-
-static void stop_hrvtime(void)
-{
- if (hrvtime_started > 0)
- hipe_perfctr_hrvtime_close();
- hrvtime_started = 0;
-}
-
-static double get_hrvtime(void)
-{
- if (hrvtime_started > 0)
- return hipe_perfctr_hrvtime_get();
- else
- return fallback_get_hrvtime();
-}
-
-#else /* !USE_PERFCTR */
-
/*
* Fallback, if nothing better exists.
* This is the same as {X,_} = statistics(runtime), which uses
@@ -908,8 +857,6 @@ static double get_hrvtime(void)
#define stop_hrvtime() do{}while(0)
#define get_hrvtime() fallback_get_hrvtime()
-#endif /* !USE_PERFCTR */
-
BIF_RETTYPE hipe_bifs_get_hrvtime_0(BIF_ALIST_0)
{
Eterm *hp;
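
With the perfctr-based implementation removed, hipe_bifs:get_hrvtime/0 always uses the fallback, i.e. the same user-runtime figure as statistics(runtime). A rough illustration of such a fallback, assuming a POSIX system and using getrusage(2); the actual emulator asks its own runtime bookkeeping (elapsed_time_both), not getrusage:

    #include <stdio.h>
    #include <sys/resource.h>

    /* Fallback "high-resolution virtual time": user CPU time of the
     * current process in milliseconds. Only an approximation of what the
     * emulator reports. */
    static double fallback_get_hrvtime(void)
    {
        struct rusage ru;
        if (getrusage(RUSAGE_SELF, &ru) != 0)
            return 0.0;
        return ru.ru_utime.tv_sec * 1000.0 + ru.ru_utime.tv_usec / 1000.0;
    }

    int main(void)
    {
        /* burn a little CPU so the value is non-zero */
        volatile unsigned long n = 0;
        for (unsigned long i = 0; i < 50000000UL; i++)
            n += i;
        printf("user time: %.3f ms (n=%lu)\n", fallback_get_hrvtime(), n);
        return 0;
    }
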
diff --git a/erts/emulator/hipe/hipe_perfctr.c b/erts/emulator/hipe/hipe_perfctr.c
deleted file mode 100644
index 371b3fb097..0000000000
--- a/erts/emulator/hipe/hipe_perfctr.c
+++ /dev/null
@@ -1,229 +0,0 @@
-/*
- * %CopyrightBegin%
- *
- * Copyright Ericsson AB 2004-2011. All Rights Reserved.
- *
- * The contents of this file are subject to the Erlang Public License,
- * Version 1.1, (the "License"); you may not use this file except in
- * compliance with the License. You should have received a copy of the
- * Erlang Public License along with this software. If not, it can be
- * retrieved online at http://www.erlang.org/.
- *
- * Software distributed under the License is distributed on an "AS IS"
- * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
- * the License for the specific language governing rights and limitations
- * under the License.
- *
- * %CopyrightEnd%
- */
-
-
-#ifdef HAVE_CONFIG_H
-#include "config.h"
-#endif
-#include "sys.h"
-#include "error.h"
-#include "global.h"
-#include "bif.h"
-#include "big.h"
-#include "erl_binary.h"
-#include "hipe_perfctr.h"
-#include "libperfctr.h"
-
-static struct vperfctr *vperfctr;
-static unsigned int have_rdtsc;
-static double tsc_to_ms;
-static unsigned int tsc_on; /* control calls must set tsc_on if have_rdtsc is true */
-static unsigned int nractrs;
-static unsigned int users;
-#define USER_BIFS (1<<0)
-#define USER_HRVTIME (1<<1)
-
-static int hipe_perfctr_open(unsigned int user)
-{
- struct perfctr_info info;
-
- if (!vperfctr) {
- vperfctr = vperfctr_open();
- if (!vperfctr)
- return -1;
- if (vperfctr_info(vperfctr, &info) >= 0) {
- tsc_to_ms = (double)(info.tsc_to_cpu_mult ? : 1) / (double)info.cpu_khz;
- have_rdtsc = (info.cpu_features & PERFCTR_FEATURE_RDTSC) ? 1 : 0;
- }
- tsc_on = 0;
- nractrs = 0;
- }
- users |= user;
- return 0;
-}
-
-static void hipe_perfctr_reset(void)
-{
- struct vperfctr_control control;
-
- memset(&control, 0, sizeof control);
- if (have_rdtsc)
- control.cpu_control.tsc_on = 1;
- nractrs = 0;
- if (vperfctr_control(vperfctr, &control) >= 0)
- tsc_on = 1;
-}
-
-static void hipe_perfctr_close(unsigned int user)
-{
- if (!vperfctr)
- return;
- users &= ~user;
- switch (users) {
- case 0:
- vperfctr_unlink(vperfctr);
- vperfctr_close(vperfctr);
- vperfctr = NULL;
- tsc_on = 0;
- nractrs = 0;
- break;
- case USER_HRVTIME:
- hipe_perfctr_reset();
- }
-}
-
-/*
- * Interface for HiPE's hrvtime code.
- */
-
-int hipe_perfctr_hrvtime_open(void)
-{
- if (hipe_perfctr_open(USER_HRVTIME) < 0)
- return -1;
- if (have_rdtsc) {
- if (!tsc_on)
- hipe_perfctr_reset(); /* note: updates tsc_on */
- if (tsc_on)
- return 0;
- }
- hipe_perfctr_hrvtime_close();
- return -1;
-}
-
-void hipe_perfctr_hrvtime_close(void)
-{
- hipe_perfctr_close(USER_HRVTIME);
-}
-
-double hipe_perfctr_hrvtime_get(void)
-{
- return (double)vperfctr_read_tsc(vperfctr) * tsc_to_ms;
-}
-
-/*
- * BIF interface for user-programmable performance counters.
- */
-
-BIF_RETTYPE hipe_bifs_vperfctr_open_0(BIF_ALIST_0)
-{
- if (hipe_perfctr_open(USER_BIFS) < 0)
- BIF_RET(am_false); /* arity 0 BIFs can't fail :-( */
- BIF_RET(am_true);
-}
-
-BIF_RETTYPE hipe_bifs_vperfctr_close_0(BIF_ALIST_0)
-{
- hipe_perfctr_close(USER_BIFS);
- BIF_RET(NIL);
-}
-
-static Eterm ull_to_integer(unsigned long long x, Process *p)
-{
- unsigned long long tmpx;
- unsigned int ds, i;
- size_t sz;
- Eterm *hp;
- ErtsDigit *xp;
-
- if (x <= (unsigned long long)MAX_SMALL)
- return make_small(x);
-
- /* Calculate number of digits. */
- ds = 0;
- tmpx = x;
- do {
- ++ds;
- tmpx = (tmpx >> (D_EXP / 2)) >> (D_EXP / 2);
- } while (tmpx != 0);
-
- sz = BIG_NEED_SIZE(ds); /* number of words including arity */
- hp = HAlloc(p, sz);
- *hp = make_pos_bignum_header(sz-1);
-
- xp = (ErtsDigit*)(hp+1);
- i = 0;
- do {
- xp[i++] = (ErtsDigit)x;
- x = (x >> (D_EXP / 2)) >> (D_EXP / 2);
- } while (i < ds);
- while (i & (BIG_DIGITS_PER_WORD-1))
- xp[i++] = 0;
-
- return make_big(hp);
-}
-
-BIF_RETTYPE hipe_bifs_vperfctr_info_0(BIF_ALIST_0)
-{
- struct perfctr_info info;
-
- if (!vperfctr || vperfctr_info(vperfctr, &info) < 0)
- BIF_RET(am_false); /* arity 0 BIFs can't fail :-( */
- BIF_RET(new_binary(BIF_P, (void*)&info, sizeof info));
-}
-
-BIF_RETTYPE hipe_bifs_vperfctr_read_tsc_0(BIF_ALIST_0)
-{
- unsigned long long val;
-
- if (!vperfctr || !tsc_on)
- BIF_RET(am_false); /* arity 0 BIFs can't fail :-( */
- val = vperfctr_read_tsc(vperfctr);
- BIF_RET(ull_to_integer(val, BIF_P));
-}
-
-BIF_RETTYPE hipe_bifs_vperfctr_read_pmc_1(BIF_ALIST_1)
-{
- Uint pmc;
- unsigned long long val;
-
- if (!vperfctr ||
- is_not_small(BIF_ARG_1) ||
- (pmc = unsigned_val(BIF_ARG_1), pmc >= nractrs))
- BIF_RET(am_false); /* for consistency with the arity 0 BIFs */
- val = vperfctr_read_pmc(vperfctr, pmc);
- BIF_RET(ull_to_integer(val, BIF_P));
-}
-
-BIF_RETTYPE hipe_bifs_vperfctr_control_1(BIF_ALIST_1)
-{
- void *bytes;
- struct vperfctr_control control;
- Uint bitoffs;
- Uint bitsize;
-
- if (!vperfctr)
- BIF_ERROR(BIF_P, BADARG);
- if (is_not_binary(BIF_ARG_1))
- BIF_ERROR(BIF_P, BADARG);
- if (binary_size(BIF_ARG_1) != sizeof control)
- BIF_ERROR(BIF_P, BADARG);
- ERTS_GET_BINARY_BYTES(BIF_ARG_1, bytes, bitoffs, bitsize);
- ASSERT(bitoffs == 0);
- ASSERT(bitsize == 0);
- memcpy(&control, bytes, sizeof control);
- if (have_rdtsc)
- control.cpu_control.tsc_on = 1;
- if (vperfctr_control(vperfctr, &control) < 0) {
- hipe_perfctr_reset();
- BIF_ERROR(BIF_P, BADARG);
- }
- tsc_on = control.cpu_control.tsc_on;
- nractrs = control.cpu_control.nractrs;
- BIF_RET(NIL);
-}
diff --git a/erts/emulator/hipe/hipe_perfctr.h b/erts/emulator/hipe/hipe_perfctr.h
deleted file mode 100644
index 8fbf9ecf35..0000000000
--- a/erts/emulator/hipe/hipe_perfctr.h
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * %CopyrightBegin%
- *
- * Copyright Ericsson AB 2004-2011. All Rights Reserved.
- *
- * The contents of this file are subject to the Erlang Public License,
- * Version 1.1, (the "License"); you may not use this file except in
- * compliance with the License. You should have received a copy of the
- * Erlang Public License along with this software. If not, it can be
- * retrieved online at http://www.erlang.org/.
- *
- * Software distributed under the License is distributed on an "AS IS"
- * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
- * the License for the specific language governing rights and limitations
- * under the License.
- *
- * %CopyrightEnd%
- */
-
-
-extern int hipe_perfctr_hrvtime_open(void);
-extern void hipe_perfctr_hrvtime_close(void);
-extern double hipe_perfctr_hrvtime_get(void);
diff --git a/erts/emulator/hipe/hipe_perfctr.tab b/erts/emulator/hipe/hipe_perfctr.tab
deleted file mode 100644
index eaecea4651..0000000000
--- a/erts/emulator/hipe/hipe_perfctr.tab
+++ /dev/null
@@ -1,25 +0,0 @@
-#
-# %CopyrightBegin%
-#
-# Copyright Ericsson AB 2004-2011. All Rights Reserved.
-#
-# The contents of this file are subject to the Erlang Public License,
-# Version 1.1, (the "License"); you may not use this file except in
-# compliance with the License. You should have received a copy of the
-# Erlang Public License along with this software. If not, it can be
-# retrieved online at http://www.erlang.org/.
-#
-# Software distributed under the License is distributed on an "AS IS"
-# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-# the License for the specific language governing rights and limitations
-# under the License.
-#
-# %CopyrightEnd%
-#
-
-bif hipe_bifs:vperfctr_open/0
-bif hipe_bifs:vperfctr_close/0
-bif hipe_bifs:vperfctr_info/0
-bif hipe_bifs:vperfctr_read_tsc/0
-bif hipe_bifs:vperfctr_read_pmc/1
-bif hipe_bifs:vperfctr_control/1
diff --git a/erts/emulator/hipe/hipe_ppc.c b/erts/emulator/hipe/hipe_ppc.c
index 4dc26cdbc8..1eaa9f6855 100644
--- a/erts/emulator/hipe/hipe_ppc.c
+++ b/erts/emulator/hipe/hipe_ppc.c
@@ -293,6 +293,8 @@ void *hipe_make_native_stub(void *callee_exp, unsigned int beamArity)
abort();
code = alloc_stub(7);
+ if (!code)
+ return NULL;
/* addis r12,0,callee_exp@highest */
code[0] = 0x3d800000 | (((unsigned long)callee_exp >> 48) & 0xffff);
@@ -381,6 +383,8 @@ void *hipe_make_native_stub(void *callee_exp, unsigned int beamArity)
abort();
code = alloc_stub(4);
+ if (!code)
+ return NULL;
/* addi r12,0,callee_exp@l */
code[0] = 0x39800000 | ((unsigned long)callee_exp & 0xFFFF);
diff --git a/erts/emulator/hipe/hipe_ppc.h b/erts/emulator/hipe/hipe_ppc.h
index 66000c1846..e9d3e6564b 100644
--- a/erts/emulator/hipe/hipe_ppc.h
+++ b/erts/emulator/hipe/hipe_ppc.h
@@ -64,10 +64,6 @@ AEXTERN(void,hipe_ppc_inc_stack,(void));
extern void hipe_ppc_inc_stack(void); /* we don't have the AEXTERN() fallback :-( */
#endif
-/* for hipe_bifs_enter_code_2 */
-extern void *hipe_alloc_code(Uint nrbytes, Eterm callees, Eterm *trampolines, Process *p);
-#define HIPE_ALLOC_CODE(n,c,t,p) hipe_alloc_code((n),(c),(t),(p))
-
#if !defined(__powerpc64__)
extern const unsigned int fconv_constant[];
#endif
diff --git a/erts/emulator/hipe/hipe_sparc.c b/erts/emulator/hipe/hipe_sparc.c
index 2052aa8498..fea3b623a9 100644
--- a/erts/emulator/hipe/hipe_sparc.c
+++ b/erts/emulator/hipe/hipe_sparc.c
@@ -130,7 +130,7 @@ static void atexit_alloc_code_stats(void)
#define ALLOC_CODE_STATS(X) do{}while(0)
#endif
-static void morecore(unsigned int alloc_bytes)
+static int morecore(unsigned int alloc_bytes)
{
unsigned int map_bytes;
char *map_hint, *map_start;
@@ -158,10 +158,9 @@ static void morecore(unsigned int alloc_bytes)
#endif
,
-1, 0);
- if (map_start == MAP_FAILED) {
- perror("mmap");
- abort();
- }
+ if (map_start == MAP_FAILED)
+ return -1;
+
ALLOC_CODE_STATS(total_mapped += map_bytes);
/* Merge adjacent mappings, so the trailing portion of the previous
@@ -177,6 +176,8 @@ static void morecore(unsigned int alloc_bytes)
}
ALLOC_CODE_STATS(atexit_alloc_code_stats());
+
+ return 0;
}
static void *alloc_code(unsigned int alloc_bytes)
@@ -186,8 +187,8 @@ static void *alloc_code(unsigned int alloc_bytes)
/* Align function entries. */
alloc_bytes = (alloc_bytes + 3) & ~3;
- if (code_bytes < alloc_bytes)
- morecore(alloc_bytes);
+ if (code_bytes < alloc_bytes && morecore(alloc_bytes) != 0)
+ return NULL;
ALLOC_CODE_STATS(++nr_allocs);
ALLOC_CODE_STATS(total_alloc += alloc_bytes);
res = code_next;
@@ -211,6 +212,8 @@ void *hipe_make_native_stub(void *callee_exp, unsigned int beamArity)
int i;
code = alloc_code(5*sizeof(int));
+ if (!code)
+ return NULL;
/* sethi %hi(Address), %i4 */
code[0] = 0x39000000 | (((unsigned int)callee_exp >> 10) & 0x3FFFFF);
diff --git a/erts/emulator/hipe/hipe_sparc.h b/erts/emulator/hipe/hipe_sparc.h
index 1134b86004..2d92ca3ca8 100644
--- a/erts/emulator/hipe/hipe_sparc.h
+++ b/erts/emulator/hipe/hipe_sparc.h
@@ -47,8 +47,4 @@ static __inline__ int hipe_word32_address_ok(void *address)
extern void hipe_sparc_inc_stack(void);
-/* for hipe_bifs_enter_code_2 */
-extern void *hipe_alloc_code(Uint nrbytes, Eterm callees, Eterm *trampolines, Process *p);
-#define HIPE_ALLOC_CODE(n,c,t,p) hipe_alloc_code((n),(c),(t),(p))
-
#endif /* HIPE_SPARC_H */
diff --git a/erts/emulator/hipe/hipe_x86.c b/erts/emulator/hipe/hipe_x86.c
index 314f6b597c..998905ea63 100644
--- a/erts/emulator/hipe/hipe_x86.c
+++ b/erts/emulator/hipe/hipe_x86.c
@@ -108,7 +108,7 @@ static void atexit_alloc_code_stats(void)
#define MAP_ANONYMOUS MAP_ANON
#endif
-static void morecore(unsigned int alloc_bytes)
+static int morecore(unsigned int alloc_bytes)
{
unsigned int map_bytes;
char *map_hint, *map_start;
@@ -136,10 +136,9 @@ static void morecore(unsigned int alloc_bytes)
#endif
,
-1, 0);
- if (map_start == MAP_FAILED) {
- perror("mmap");
- abort();
- }
+ if (map_start == MAP_FAILED)
+ return -1;
+
ALLOC_CODE_STATS(total_mapped += map_bytes);
/* Merge adjacent mappings, so the trailing portion of the previous
@@ -155,6 +154,8 @@ static void morecore(unsigned int alloc_bytes)
}
ALLOC_CODE_STATS(atexit_alloc_code_stats());
+
+ return 0;
}
static void *alloc_code(unsigned int alloc_bytes)
@@ -164,8 +165,8 @@ static void *alloc_code(unsigned int alloc_bytes)
/* Align function entries. */
alloc_bytes = (alloc_bytes + 3) & ~3;
- if (code_bytes < alloc_bytes)
- morecore(alloc_bytes);
+ if (code_bytes < alloc_bytes && morecore(alloc_bytes) != 0)
+ return NULL;
ALLOC_CODE_STATS(++nr_allocs);
ALLOC_CODE_STATS(total_alloc += alloc_bytes);
res = code_next;
@@ -207,6 +208,8 @@ void *hipe_make_native_stub(void *callee_exp, unsigned int beamArity)
(P_CALLEE_EXP >= 128 ? 3 : 0) +
(P_ARITY >= 128 ? 3 : 0);
codep = code = alloc_code(codeSize);
+ if (!code)
+ return NULL;
/* movl $beamAddress, P_CALLEE_EXP(%ebp); 3 or 6 bytes, plus 4 */
codep[0] = 0xc7;
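
In the sparc and x86 back ends (and likewise for the ppc stub allocator further up), morecore() no longer aborts when mmap() fails: it returns -1, alloc_code() then returns NULL, and hipe_make_native_stub() passes the NULL up to the callers shown earlier. A compact sketch of that error-propagation chain, using a simplified bump allocator over an anonymous executable mapping (illustrative only, not the emulator's allocator):

    #include <stdio.h>
    #include <stdint.h>
    #include <sys/mman.h>

    #ifndef MAP_ANONYMOUS
    #define MAP_ANONYMOUS MAP_ANON
    #endif

    static char *code_next;      /* next free byte in the current mapping */
    static size_t code_bytes;    /* bytes left in the current mapping     */

    /* Grow the code area; return 0 on success, -1 if mmap() fails. */
    static int morecore(size_t alloc_bytes)
    {
        size_t map_bytes = (alloc_bytes + 0xffff) & ~(size_t)0xffff;
        void *p = mmap(NULL, map_bytes, PROT_READ | PROT_WRITE | PROT_EXEC,
                       MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
        if (p == MAP_FAILED)
            return -1;           /* was perror()+abort() before the change */
        code_next = p;
        code_bytes = map_bytes;
        return 0;
    }

    /* Returns NULL instead of crashing when the area cannot be grown. */
    static void *alloc_code(size_t alloc_bytes)
    {
        alloc_bytes = (alloc_bytes + 3) & ~(size_t)3;    /* align entries */
        if (code_bytes < alloc_bytes && morecore(alloc_bytes) != 0)
            return NULL;
        void *res = code_next;
        code_next += alloc_bytes;
        code_bytes -= alloc_bytes;
        return res;
    }

    int main(void)
    {
        uint32_t *stub = alloc_code(5 * sizeof(uint32_t));
        if (!stub) {
            fprintf(stderr, "code allocation failed\n");
            return 1;
        }
        printf("stub at %p\n", (void *)stub);
        return 0;
    }
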
diff --git a/erts/emulator/hipe/hipe_x86.h b/erts/emulator/hipe/hipe_x86.h
index 97f09e38cd..f29117d0c4 100644
--- a/erts/emulator/hipe/hipe_x86.h
+++ b/erts/emulator/hipe/hipe_x86.h
@@ -53,8 +53,4 @@ extern void nbif_inc_stack_0(void);
extern void nbif_handle_fp_exception(void);
#endif
-/* for hipe_bifs_enter_code_2 */
-extern void *hipe_alloc_code(Uint nrbytes, Eterm callees, Eterm *trampolines, Process *p);
-#define HIPE_ALLOC_CODE(n,c,t,p) hipe_alloc_code((n),(c),(t),(p))
-
#endif /* HIPE_X86_H */
diff --git a/erts/emulator/sys/unix/erl_child_setup.c b/erts/emulator/sys/unix/erl_child_setup.c
index 94eb6b1547..5ad92dad02 100644
--- a/erts/emulator/sys/unix/erl_child_setup.c
+++ b/erts/emulator/sys/unix/erl_child_setup.c
@@ -101,7 +101,9 @@ main(int argc, char *argv[])
if (sscanf(argv[CS_ARGV_FD_CR_IX], "%d:%d", &from, &to) != 2)
return 1;
-#if defined(__ANDROID__)
+#if defined(HAVE_CLOSEFROM)
+ closefrom(from);
+#elif defined(__ANDROID__)
for (i = from; i <= to; i++) {
if (i!=__system_properties_fd)
(void) close(i);
@@ -109,13 +111,6 @@ main(int argc, char *argv[])
#else
for (i = from; i <= to; i++)
(void) close(i);
-#endif /* __ANDROID__ */
-
-#if defined(HAVE_CLOSEFROM)
- closefrom(from);
-#else
- for (i = from; i <= to; i++)
- (void) close(i);
#endif
if (!(argv[CS_ARGV_WD_IX][0] == '.' && argv[CS_ARGV_WD_IX][1] == '\0')
@@ -147,8 +142,6 @@ main(int argc, char *argv[])
return 1;
}
-
-
#if defined(__ANDROID__)
int __system_properties_fd(void)
{
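
erl_child_setup previously closed the inherited descriptor range twice (once in the Android/generic loop and then again in a separate closefrom() block); the reordered #if above closes it exactly once, preferring closefrom(3) where the platform provides it. A sketch of that preference, assuming HAVE_CLOSEFROM is defined by the build configuration on such platforms (the Android special case for __system_properties_fd is omitted here):

    #include <unistd.h>

    /* Close every inherited descriptor in [from, to]. Prefer closefrom(3)
     * where available; otherwise fall back to the plain close() loop. */
    static void close_inherited_fds(int from, int to)
    {
    #if defined(HAVE_CLOSEFROM)
        closefrom(from);
        (void)to;
    #else
        for (int i = from; i <= to; i++)
            (void)close(i);
    #endif
    }

    int main(void)
    {
        close_inherited_fds(3, 1023);   /* keep stdin/stdout/stderr */
        return 0;
    }
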
diff --git a/erts/emulator/sys/unix/sys.c b/erts/emulator/sys/unix/sys.c
index 6248651882..0d9c743c0c 100644
--- a/erts/emulator/sys/unix/sys.c
+++ b/erts/emulator/sys/unix/sys.c
@@ -227,8 +227,7 @@ static int sig_notify_fds[2] = {-1, -1};
static int sig_suspend_fds[2] = {-1, -1};
#define ERTS_SYS_SUSPEND_SIGNAL SIGUSR2
-#elif defined(USE_THREADS)
-static int async_fd[2];
+
#endif
jmp_buf erts_sys_sigsegv_jmp;
@@ -273,6 +272,8 @@ static void note_child_death(int, int);
static void* child_waiter(void *);
#endif
+static int crashdump_companion_cube_fd = -1;
+
/********************* General functions ****************************/
/* This is used by both the drivers and general I/O, must be set early */
@@ -602,6 +603,14 @@ erts_sys_pre_init(void)
close(fd);
}
+ /* We need a file descriptor to close in the crashdump creation.
+ * We close this one to be sure we can get a fd for our real file ...
+ * so, we create one here ... a stone to carry all the way home.
+ */
+
+ crashdump_companion_cube_fd = open("/dev/null", O_RDONLY);
+
+ /* don't lose it, there will be cake */
}
void
@@ -726,14 +735,13 @@ static ERTS_INLINE int
prepare_crash_dump(int secs)
{
#define NUFBUF (3)
- int i, max;
+ int i;
char env[21]; /* enough to hold any 64-bit integer */
size_t envsz;
DeclareTmpHeapNoproc(heap,NUFBUF);
Port *heart_port;
Eterm *hp = heap;
Eterm list = NIL;
- int heart_fd[2] = {-1,-1};
int has_heart = 0;
UseTmpHeapNoproc(NUFBUF);
@@ -756,46 +764,22 @@ prepare_crash_dump(int secs)
alarm((unsigned int)secs);
}
- if (heart_port) {
- /* hearts input fd
- * We "know" drv_data is the in_fd since the port is started with read|write
- */
- heart_fd[0] = (int)heart_port->drv_data;
- heart_fd[1] = (int)driver_data[heart_fd[0]].ofd;
- has_heart = 1;
+ /* close all viable sockets via emergency close callbacks.
+ * Specifically we want to close epmd sockets.
+ */
- list = CONS(hp, make_small(8), list); hp += 2;
+ erts_emergency_close_ports();
+ if (heart_port) {
+ has_heart = 1;
+ list = CONS(hp, make_small(8), list); hp += 2;
/* send to heart port, CMD = 8, i.e. prepare crash dump =o */
erts_port_output(NULL, ERTS_PORT_SIG_FLG_FORCE_IMM_CALL, heart_port,
heart_port->common.id, list, NULL);
}
- /* Make sure we unregister at epmd (unknown fd) and get at least
- one free filedescriptor (for erl_crash.dump) */
-
- max = max_files;
- if (max < 1024)
- max = 1024;
- for (i = 3; i < max; i++) {
-#if defined(ERTS_SMP)
- /* We don't want to close the signal notification pipe... */
- if (i == sig_notify_fds[0] || i == sig_notify_fds[1])
- continue;
- /* We don't want to close the signal syspend pipe... */
- if (i == sig_suspend_fds[0] || i == sig_suspend_fds[1])
- continue;
-#elif defined(USE_THREADS)
- /* We don't want to close the async notification pipe... */
- if (i == async_fd[0] || i == async_fd[1])
- continue;
-#endif
- /* We don't want to close our heart yet ... */
- if (i == heart_fd[0] || i == heart_fd[1])
- continue;
-
- close(i);
- }
+ /* Make sure we have a fd for our crashdump file. */
+ close(crashdump_companion_cube_fd);
envsz = sizeof(env);
i = erts_sys_getenv__("ERL_CRASH_DUMP_NICE", env, &envsz);
@@ -1645,9 +1629,13 @@ static ErlDrvData spawn_start(ErlDrvPort port_num, char* name, SysDriverOpts* op
goto child_error;
}
+#if defined(HAVE_CLOSEFROM)
+ closefrom(opts->use_stdio ? 3 : 5);
+#else
for (i = opts->use_stdio ? 3 : 5; i < max_files; i++)
(void) close(i);
-
+#endif
+
if (opts->wd && chdir(opts->wd) < 0)
goto child_error;
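
Instead of walking the whole descriptor table at crash time (and carefully skipping the signal pipes, the async pipe and heart's fds), the emulator now reserves one descriptor on /dev/null at startup and closes it in prepare_crash_dump(), guaranteeing a free fd for erl_crash.dump; sockets such as the epmd connection are shut through erts_emergency_close_ports(). A minimal sketch of the reserve-early, release-at-crash-time idea, with hypothetical names (reserve_crashdump_fd, open_crashdump):

    #include <fcntl.h>
    #include <stdio.h>
    #include <unistd.h>

    static int reserved_fd = -1;

    /* Called once at startup: park one descriptor on /dev/null so that a
     * descriptor is guaranteed to be available when the crash dump must
     * be written, even if the process has hit its fd limit. */
    static void reserve_crashdump_fd(void)
    {
        reserved_fd = open("/dev/null", O_RDONLY);
    }

    /* Called from the crash handler: free the parked descriptor, then
     * open the dump file. The real emulator also notifies heart and runs
     * the emergency close callbacks before getting here. */
    static int open_crashdump(const char *path)
    {
        if (reserved_fd >= 0) {
            close(reserved_fd);
            reserved_fd = -1;
        }
        return open(path, O_WRONLY | O_CREAT | O_TRUNC, 0640);
    }

    int main(void)
    {
        reserve_crashdump_fd();
        int fd = open_crashdump("demo_crash.dump");
        if (fd >= 0) {
            (void)write(fd, "dump\n", 5);
            close(fd);
            puts("wrote demo_crash.dump");
        }
        return 0;
    }
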
diff --git a/erts/emulator/test/trace_bif_SUITE.erl b/erts/emulator/test/trace_bif_SUITE.erl
index 2c78aa394f..063e348836 100644
--- a/erts/emulator/test/trace_bif_SUITE.erl
+++ b/erts/emulator/test/trace_bif_SUITE.erl
@@ -260,7 +260,9 @@ bif_process() ->
apply(erlang, Name, Args),
bif_process();
{do_time_bif} ->
- _ = time(), %Assignment tells compiler to keep call.
+ %% Match the return value to ensure that the time() call
+ %% is not optimized away.
+ {_,_,_} = time(),
bif_process();
{do_statistics_bif} ->
statistics(runtime),
diff --git a/lib/asn1/src/Makefile b/lib/asn1/src/Makefile
index 6798da0072..40f440423d 100644
--- a/lib/asn1/src/Makefile
+++ b/lib/asn1/src/Makefile
@@ -206,6 +206,7 @@ $(EBIN)/asn1ct_constructed_per.beam: asn1ct_constructed_per.erl asn1_records.hrl
$(EBIN)/asn1ct_func.beam: asn1ct_func.erl
$(EBIN)/asn1ct_gen.beam: asn1ct_gen.erl asn1_records.hrl
$(EBIN)/asn1ct_gen_ber_bin_v2.beam: asn1ct_gen_ber_bin_v2.erl asn1_records.hrl
+$(EBIN)/asn1ct_gen_check.beam: asn1_records.hrl
$(EBIN)/asn1ct_gen_per.beam: asn1ct_gen_per.erl asn1_records.hrl
$(EBIN)/asn1ct_gen_per_rt2ct.beam: asn1ct_gen_per_rt2ct.erl asn1_records.hrl
$(EBIN)/asn1ct_imm.beam: asn1ct_imm.erl
diff --git a/lib/asn1/src/asn1_db.erl b/lib/asn1/src/asn1_db.erl
index 48d9dd16d7..5577969727 100644
--- a/lib/asn1/src/asn1_db.erl
+++ b/lib/asn1/src/asn1_db.erl
@@ -19,7 +19,8 @@
%%
-module(asn1_db).
--export([dbstart/1,dbnew/2,dbload/1,dbload/3,dbsave/2,dbput/3,dbget/2]).
+-export([dbstart/1,dbnew/2,dbload/1,dbload/3,dbsave/2,dbput/2,
+ dbput/3,dbget/2]).
-export([dbstop/0]).
-record(state, {parent, monitor, includes, table}).
@@ -44,6 +45,7 @@ dbload(Module) ->
dbnew(Module, Erule) -> req({new, Module, Erule}).
dbsave(OutFile, Module) -> cast({save, OutFile, Module}).
dbput(Module, K, V) -> cast({set, Module, K, V}).
+dbput(Module, Kvs) -> cast({set, Module, Kvs}).
dbget(Module, K) -> req({get, Module, K}).
dbstop() -> Resp = req(stop), erase(?MODULE), Resp.
@@ -82,6 +84,10 @@ loop(#state{parent = Parent, monitor = MRef, table = Table,
[{_, Modtab}] = ets:lookup(Table, Mod),
ets:insert(Modtab, {K2, V}),
loop(State);
+ {set, Mod, Kvs} ->
+ [{_, Modtab}] = ets:lookup(Table, Mod),
+ ets:insert(Modtab, Kvs),
+ loop(State);
{From, {get, Mod, K2}} ->
%% XXX If there is no information for Mod, get_table/3
%% will attempt to load information from an .asn1db
diff --git a/lib/asn1/src/asn1_records.hrl b/lib/asn1/src/asn1_records.hrl
index 6c1cf1b12a..84435b2d21 100644
--- a/lib/asn1/src/asn1_records.hrl
+++ b/lib/asn1/src/asn1_records.hrl
@@ -81,9 +81,19 @@
module :: atom(),
val :: atom()}).
--record(state,{module,mname,type,tname,value,vname,erule,parameters=[],
- inputmodules,abscomppath=[],recordtopname=[],options,
- sourcedir}).
+-record(state,
+ {module,
+ mname,
+ tname,
+ erule,
+ parameters=[],
+ inputmodules=[],
+ abscomppath=[],
+ recordtopname=[],
+ options,
+ sourcedir,
+ error_context %Top-level thingie (contains line numbers)
+ }).
%% state record used by back-end at partial decode
%% active is set to 'yes' when a partial decode function is generated.
diff --git a/lib/asn1/src/asn1ct.erl b/lib/asn1/src/asn1ct.erl
index df341e5aab..a26d63c97d 100644
--- a/lib/asn1/src/asn1ct.erl
+++ b/lib/asn1/src/asn1ct.erl
@@ -34,7 +34,8 @@
%% Application internal exports
-export([compile_asn/3,compile_asn1/3,compile_py/3,compile/3,
vsn/0,
- get_name_of_def/1,get_pos_of_def/1]).
+ get_name_of_def/1,get_pos_of_def/1,
+ unset_pos_mod/1]).
-export([read_config_data/1,get_gen_state_field/1,
partial_inc_dec_toptype/1,update_gen_state/2,
get_tobe_refed_func/1,reset_gen_state/0,is_function_generated/1,
@@ -166,46 +167,26 @@ set_scan_parse_pass(#st{files=Files}=St) ->
{error,St#st{error=Error}}
end.
-set_scan_parse_pass_1([F|Fs], St) ->
+set_scan_parse_pass_1([F|Fs], #st{file=File}=St) ->
case asn1ct_tok:file(F) of
{error,Error} ->
throw(Error);
Tokens when is_list(Tokens) ->
- case catch asn1ct_parser2:parse(Tokens) of
+ case asn1ct_parser2:parse(File, Tokens) of
{ok,M} ->
[M|set_scan_parse_pass_1(Fs, St)];
- {error,ErrorTerm} ->
- throw(handle_parse_error(ErrorTerm, St))
+ {error,Errors} ->
+ throw(Errors)
end
end;
set_scan_parse_pass_1([], _) -> [].
-parse_pass(#st{code=Tokens}=St) ->
- case catch asn1ct_parser2:parse(Tokens) of
+parse_pass(#st{file=File,code=Tokens}=St) ->
+ case asn1ct_parser2:parse(File, Tokens) of
{ok,M} ->
{ok,St#st{code=M}};
- {error,ErrorTerm} ->
- {error,St#st{error=handle_parse_error(ErrorTerm, St)}}
- end.
-
-handle_parse_error(ErrorTerm, #st{file=File,opts=Opts}) ->
- case ErrorTerm of
- {{Line,_Mod,Message},_TokTup} ->
- if
- is_integer(Line) ->
- BaseName = filename:basename(File),
- error("syntax error at line ~p in module ~s:~n",
- [Line,BaseName], Opts);
- true ->
- error("syntax error in module ~p:~n",
- [File], Opts)
- end,
- print_error_message(Message),
- Message;
- {Line,_Mod,[Message,Token]} ->
- error("syntax error: ~p ~p at line ~p~n",
- [Message,Token,Line], Opts),
- {Line,[Message,Token]}
+ {error,Errors} ->
+ {error,St#st{error=Errors}}
end.
merge_pass(#st{file=Base,code=Code}=St) ->
@@ -559,7 +540,10 @@ unset_pos_mod(Def) when is_record(Def,pvaluesetdef) ->
unset_pos_mod(Def) when is_record(Def,pobjectdef) ->
Def#pobjectdef{pos=undefined};
unset_pos_mod(Def) when is_record(Def,pobjectsetdef) ->
- Def#pobjectsetdef{pos=undefined}.
+ Def#pobjectsetdef{pos=undefined};
+unset_pos_mod(#'ComponentType'{} = Def) ->
+ Def#'ComponentType'{pos=undefined};
+unset_pos_mod(Def) -> Def.
get_pos_of_def(#typedef{pos=Pos}) ->
Pos;
@@ -1406,33 +1390,6 @@ prepare_bytes(Bytes) -> list_to_binary(Bytes).
vsn() ->
?vsn.
-
-
-print_error_message([got,H|T]) when is_list(H) ->
- io:format(" got:"),
- print_listing(H,"and"),
- print_error_message(T);
-print_error_message([expected,H|T]) when is_list(H) ->
- io:format(" expected one of:"),
- print_listing(H,"or"),
- print_error_message(T);
-print_error_message([H|T]) ->
- io:format(" ~p",[H]),
- print_error_message(T);
-print_error_message([]) ->
- io:format("~n").
-
-print_listing([H1,H2|[]],AndOr) ->
- io:format(" ~p ~s ~p",[H1,AndOr,H2]);
-print_listing([H1,H2|T],AndOr) ->
- io:format(" ~p,",[H1]),
- print_listing([H2|T],AndOr);
-print_listing([H],_AndOr) ->
- io:format(" ~p",[H]);
-print_listing([],_) ->
- ok.
-
-
specialized_decode_prepare(Erule,M,TsAndVs,Options) ->
case lists:member(asn1config,Options) of
true ->
diff --git a/lib/asn1/src/asn1ct_check.erl b/lib/asn1/src/asn1ct_check.erl
index 240f1cbb16..99392d6eaa 100644
--- a/lib/asn1/src/asn1ct_check.erl
+++ b/lib/asn1/src/asn1ct_check.erl
@@ -23,8 +23,6 @@
%% Main Module for ASN.1 compile time functions
%-compile(export_all).
-%% Avoid warning for local function error/1 clashing with autoimported BIF.
--compile({no_auto_import,[error/1]}).
-export([check/2,storeindb/2,format_error/1]).
%-define(debug,1).
-include("asn1_records.hrl").
@@ -60,17 +58,9 @@
-define(N_BMPString, 30).
-define(TAG_PRIMITIVE(Num),
- case S#state.erule of
- ber ->
- #tag{class='UNIVERSAL',number=Num,type='IMPLICIT',form=0};
- _ -> []
- end).
+ #tag{class='UNIVERSAL',number=Num,type='IMPLICIT',form=0}).
-define(TAG_CONSTRUCTED(Num),
- case S#state.erule of
- ber ->
- #tag{class='UNIVERSAL',number=Num,type='IMPLICIT',form=32};
- _ -> []
- end).
+ #tag{class='UNIVERSAL',number=Num,type='IMPLICIT',form=32}).
-record(newt,{type=unchanged,tag=unchanged,constraint=unchanged,inlined=no}). % used in check_type to update type and tag
@@ -249,26 +239,18 @@ check_exports(S,Module = #module{}) ->
{exports,all} ->
[];
{exports,ExportList} when is_list(ExportList) ->
- IsNotDefined =
+ IsNotDefined =
fun(X) ->
- case catch get_referenced_type(S,X) of
- {error,{asn1,_}} ->
- true;
- _ -> false
+ try
+ _ = get_referenced_type(S,X),
+ false
+ catch {error,_} ->
+ true
end
end,
- case lists:filter(IsNotDefined,ExportList) of
- [] ->
- [];
- NoDefExp ->
- GetName =
- fun(T = #'Externaltypereference'{type=N})->
- %%{exported,undefined,entity,N}
- NewS=S#state{type=T,tname=N},
- error({export,"exported undefined entity",NewS})
- end,
- lists:map(GetName,NoDefExp)
- end
+ [return_asn1_error(S, Ext, {undefined_export, Undef}) ||
+ Ext = #'Externaltypereference'{type=Undef} <- ExportList,
+ IsNotDefined(Ext)]
end.
check_imports(S, #module{imports={imports,Imports}}) ->
@@ -276,53 +258,18 @@ check_imports(S, #module{imports={imports,Imports}}) ->
check_imports_1(_S, [], Acc) ->
Acc;
-check_imports_1(S, [#'SymbolsFromModule'{symbols=Imports,module=ModuleRef}|SFMs], Acc0) ->
+check_imports_1(S, [#'SymbolsFromModule'{symbols=Imports,module=ModuleRef}|SFMs], Acc) ->
Module = name_of_def(ModuleRef),
- Refs0 = [{catch get_referenced_type(S, Ref),Ref} || Ref <- Imports],
- Refs = [{M,R} || {{M,_},R} <- Refs0],
- {Illegal,Other} = lists:splitwith(fun({error,_}) -> true;
- (_) -> false
- end, Refs),
- ChainedRefs = [R || {M,R} <- Other, M =/= Module],
- IllegalRefs = [R || {error,R} <- Illegal] ++
- [R || {M,R} <- ChainedRefs,
- ok =/= chained_import(S, Module, M, name_of_def(R))],
- Acc = [return_asn1_error(S, Ref, {undefined_import,name_of_def(Ref),Module}) ||
- Ref <- IllegalRefs] ++ Acc0,
- check_imports_1(S, SFMs, Acc).
-
-chained_import(S,ImpMod,DefMod,Name) ->
- %% Name is a referenced structure that is not defined in ImpMod,
- %% but must be present in the Imports list of ImpMod. The chain of
- %% imports of Name must end in DefMod.
- GetImports =
- fun(_M_) ->
- case asn1_db:dbget(_M_,'MODULE') of
- #module{imports={imports,ImportList}} ->
- ImportList;
- _ -> []
- end
- end,
- FindNameInImports =
- fun([],N,_) -> {no_mod,N};
- ([#'SymbolsFromModule'{symbols=Imports,module=ModuleRef}|SFMs],N,F) ->
- case [name_of_def(X) || X <- Imports, name_of_def(X) =:= N] of
- [] -> F(SFMs,N,F);
- [N] -> {name_of_def(ModuleRef),N}
- end
- end,
- case GetImports(ImpMod) of
- [] ->
- error;
- Imps ->
- case FindNameInImports(Imps,Name,FindNameInImports) of
- {no_mod,_} ->
- error;
- {DefMod,_} -> ok;
- {OtherMod,_} ->
- chained_import(S,OtherMod,DefMod,Name)
- end
- end.
+ Refs = [{try get_referenced_type(S, Ref)
+ catch throw:Error -> Error end,
+ Ref}
+ || Ref <- Imports],
+ CreateError = fun(Ref) ->
+ Error = {undefined_import,name_of_def(Ref),Module},
+ return_asn1_error(S, Ref, Error)
+ end,
+ Errors = [CreateError(Ref) || {{error, _}, Ref} <- Refs],
+ check_imports_1(S, SFMs, Errors ++ Acc).
checkt(S0, Names) ->
Check = fun do_checkt/3,
@@ -335,7 +282,7 @@ checkt(S0, Names) ->
check_fold(S0, lists:reverse(CtxtSwitch), Check) ++ Types.
do_checkt(S, Name, #typedef{typespec=TypeSpec}=Type0) ->
- NewS = S#state{type=Type0,tname=Name},
+ NewS = S#state{tname=Name},
try check_type(NewS, Type0, TypeSpec) of
#type{}=Ts ->
case Type0#typedef.checked of
@@ -350,7 +297,7 @@ do_checkt(S, Name, #typedef{typespec=TypeSpec}=Type0) ->
end
catch
{error,Reason} ->
- error({type,Reason,NewS});
+ Reason;
{asn1_class,_ClassDef} ->
{asn1_class,Name};
pobjectsetdef ->
@@ -384,33 +331,32 @@ do_checkv(S, Name, Value)
is_record(Value, typedef); %Value set may be parsed as object set.
is_record(Value, pvaluedef);
is_record(Value, pvaluesetdef) ->
- NewS = S#state{value=Value},
- try check_value(NewS, Value) of
+ try check_value(S, Value) of
{valueset,VSet} ->
Pos = asn1ct:get_pos_of_def(Value),
CheckedVSDef = #typedef{checked=true,pos=Pos,
name=Name,typespec=VSet},
- asn1_db:dbput(NewS#state.mname, Name, CheckedVSDef),
+ asn1_db:dbput(S#state.mname, Name, CheckedVSDef),
{valueset,Name};
V ->
%% update the valuedef
- asn1_db:dbput(NewS#state.mname, Name, V),
+ asn1_db:dbput(S#state.mname, Name, V),
ok
catch
{error,Reason} ->
- error({value,Reason,NewS});
+ Reason;
{pobjectsetdef} ->
{pobjectsetdef,Name};
{objectsetdef} ->
{objectsetdef,Name};
- {objectdef} ->
+ {asn1_class, _} ->
%% this is an object, save as typedef
#valuedef{checked=C,pos=Pos,name=N,type=Type,
value=Def} = Value,
ClassName = Type#type.def,
NewSpec = #'Object'{classname=ClassName,def=Def},
NewDef = #typedef{checked=C,pos=Pos,name=N,typespec=NewSpec},
- asn1_db:dbput(NewS#state.mname, Name, NewDef),
+ asn1_db:dbput(S#state.mname, Name, NewDef),
{objectdef,Name}
end.
@@ -419,7 +365,7 @@ checkp(S, Names) ->
check_fold(S, Names, fun do_checkp/3).
do_checkp(S0, Name, #ptypedef{typespec=TypeSpec}=Type0) ->
- S = S0#state{type=Type0,tname=Name},
+ S = S0#state{tname=Name},
try check_ptype(S, Type0, TypeSpec) of
#type{}=Ts ->
Type = Type0#ptypedef{checked=true,typespec=Ts},
@@ -427,7 +373,7 @@ do_checkp(S0, Name, #ptypedef{typespec=TypeSpec}=Type0) ->
ok
catch
{error,Reason} ->
- error({type,Reason,S});
+ Reason;
{asn1_class,_ClassDef} ->
{asn1_class,Name};
{asn1_param_class,_} ->
@@ -438,100 +384,81 @@ do_checkp(S0, Name, #ptypedef{typespec=TypeSpec}=Type0) ->
checkc(S, Names) ->
check_fold(S, Names, fun do_checkc/3).
-do_checkc(S0, Name, Class0) ->
- {Class1,ClassSpec} =
- case Class0 of
- #classdef{} ->
- {Class0,Class0};
- #typedef{} ->
- {#classdef{name=Name},Class0#typedef.typespec}
- end,
- S = S0#state{type=Class0,tname=Name},
- try check_class(S, ClassSpec) of
- C ->
- Class = Class1#classdef{checked=true,typespec=C},
- asn1_db:dbput(S#state.mname, Name, Class),
- ok
- catch
- {error,Reason} ->
- error({class,Reason,S})
- end.
+do_checkc(S, Name, Class) ->
+ try
+ case is_classname(Name) of
+ false ->
+ asn1_error(S, {illegal_class_name,Name});
+ true ->
+ do_checkc_1(S, Name, Class)
+ end
+ catch {error,Reason} -> Reason
+ end.
+
+do_checkc_1(S, Name, #classdef{}=Class) ->
+ C = check_class(S, Class),
+ store_class(S, true, Class#classdef{typespec=C}, Name),
+ ok;
+do_checkc_1(S, Name, #typedef{typespec=#type{def=Def}=TS}) ->
+ C = check_class(S, TS),
+ {Mod,Pos} = case Def of
+ #'Externaltypereference'{module=M, pos=P} ->
+ {M,P};
+ {pt, #'Externaltypereference'{module=M, pos=P}, _} ->
+ {M,P}
+ end,
+ Class = #classdef{name=Name, typespec=C, pos=Pos, module=Mod},
+ store_class(S, true, Class, Name),
+ ok.
+
+%% is_classname(Atom) -> true|false.
+is_classname(Name) when is_atom(Name) ->
+ lists:all(fun($-) -> true;
+ (D) when $0 =< D, D =< $9 -> true;
+ (UC) when $A =< UC, UC =< $Z -> true;
+ (_) -> false
+ end, atom_to_list(Name)).
-checko(S,[Name|Os],Acc,ExclO,ExclOS) ->
- ?dbg("Checking object ~p~n",[Name]),
- Result =
- case asn1_db:dbget(S#state.mname,Name) of
- undefined ->
- error({type,{internal_error,'???'},S});
- Object when is_record(Object,typedef) ->
- NewS = S#state{type=Object,tname=Name},
- case catch(check_object(NewS,Object,Object#typedef.typespec)) of
- {error,Reason} ->
- error({type,Reason,NewS});
- {'EXIT',Reason} ->
- error({type,{internal_error,Reason},NewS});
- {asn1,Reason} ->
- error({type,Reason,NewS});
- O ->
- NewObj = Object#typedef{checked=true,typespec=O},
- asn1_db:dbput(NewS#state.mname,Name,NewObj),
- if
- is_record(O,'Object') ->
- case O#'Object'.gen of
- true ->
- {ok,ExclO,ExclOS};
- false ->
- {ok,[Name|ExclO],ExclOS}
- end;
- is_record(O,'ObjectSet') ->
- case O#'ObjectSet'.gen of
- true ->
- {ok,ExclO,ExclOS};
- false ->
- {ok,ExclO,[Name|ExclOS]}
- end
- end
- end;
- PObject when is_record(PObject,pobjectdef) ->
- NewS = S#state{type=PObject,tname=Name},
- case (catch check_pobject(NewS,PObject)) of
- {error,Reason} ->
- error({type,Reason,NewS});
- {'EXIT',Reason} ->
- error({type,{internal_error,Reason},NewS});
- {asn1,Reason} ->
- error({type,Reason,NewS});
- PO ->
- NewPObj = PObject#pobjectdef{def=PO},
- asn1_db:dbput(NewS#state.mname,Name,NewPObj),
- {ok,[Name|ExclO],ExclOS}
- end;
- PObjSet when is_record(PObjSet,pvaluesetdef) ->
- %% this is a parameterized object set. Might be a parameterized
- %% value set, couldn't it?
- NewS = S#state{type=PObjSet,tname=Name},
- case (catch check_pobjectset(NewS,PObjSet)) of
- {error,Reason} ->
- error({type,Reason,NewS});
- {'EXIT',Reason} ->
- error({type,{internal_error,Reason},NewS});
- {asn1,Reason} ->
- error({type,Reason,NewS});
- POS ->
- %%NewPObjSet = PObjSet#pvaluesetdef{valueset=POS},
- asn1_db:dbput(NewS#state.mname,Name,POS),
- {ok,ExclO,[Name|ExclOS]}
- end
- end,
- case Result of
- {ok,NewExclO,NewExclOS} ->
- checko(S,Os,Acc,NewExclO,NewExclOS);
- _ ->
- checko(S,Os,[Result|Acc],ExclO,ExclOS)
+checko(S0,[Name|Os],Acc,ExclO,ExclOS) ->
+ Item = asn1_db:dbget(S0#state.mname, Name),
+ S = S0#state{error_context=Item},
+ try checko_1(S, Item, Name, ExclO, ExclOS) of
+ {NewExclO,NewExclOS} ->
+ checko(S, Os, Acc, NewExclO, NewExclOS)
+ catch
+ throw:{error, Error} ->
+ checko(S, Os, [Error|Acc], ExclO, ExclOS)
end;
checko(_S,[],Acc,ExclO,ExclOS) ->
{lists:reverse(Acc),lists:reverse(ExclO),lists:reverse(ExclOS)}.
+checko_1(S, #typedef{typespec=TS}=Object, Name, ExclO, ExclOS) ->
+ NewS = S#state{tname=Name},
+ O = check_object(NewS, Object, TS),
+ NewObj = Object#typedef{checked=true,typespec=O},
+ asn1_db:dbput(NewS#state.mname, Name, NewObj),
+ case O of
+ #'Object'{gen=true} ->
+ {ExclO,ExclOS};
+ #'Object'{gen=false} ->
+ {[Name|ExclO],ExclOS};
+ #'ObjectSet'{gen=true} ->
+ {ExclO,ExclOS};
+ #'ObjectSet'{gen=false} ->
+ {ExclO,[Name|ExclOS]}
+ end;
+checko_1(S, #pobjectdef{}=PObject, Name, ExclO, ExclOS) ->
+ NewS = S#state{tname=Name},
+ PO = check_pobject(NewS, PObject),
+ NewPObj = PObject#pobjectdef{def=PO},
+ asn1_db:dbput(NewS#state.mname, Name, NewPObj),
+ {[Name|ExclO],ExclOS};
+checko_1(S, #pvaluesetdef{}=PObjSet, Name, ExclO, ExclOS) ->
+ NewS = S#state{tname=Name},
+ POS = check_pobjectset(NewS, PObjSet),
+ asn1_db:dbput(NewS#state.mname, Name, POS),
+ {ExclO,[Name|ExclOS]}.
+
check_class(S,CDef=#classdef{checked=Ch,name=Name,typespec=TS}) ->
case Ch of
true -> TS;
@@ -551,22 +478,16 @@ check_class(S = #state{mname=M,tname=T},ClassSpec)
Tref = #'Externaltypereference'{type=TName} ->
{MName,RefType} = get_referenced_type(S,Tref),
#classdef{} = CD = get_class_def(S, RefType),
- NewState = update_state(S#state{type=RefType,
- tname=TName}, MName),
+ NewState = update_state(S#state{tname=TName}, MName),
check_class(NewState, CD);
{pt,ClassRef,Params} ->
%% parameterized class
{_,PClassDef} = get_referenced_type(S,ClassRef),
- NewParaList =
- [match_parameters(S,TmpParam,S#state.parameters)||
- TmpParam <- Params],
+ NewParaList = match_parameters(S, Params),
instantiate_pclass(S,PClassDef,NewParaList)
end;
-check_class(S,C) when is_record(C,objectclass) ->
- NewFieldSpec = check_class_fields(S,C#objectclass.fields),
- C#objectclass{fields=NewFieldSpec};
-check_class(_S,{poc,_ObjSet,_Params}) ->
- 'fix this later';
+check_class(S, #objectclass{}=C) ->
+ check_objectclass(S, C);
check_class(S,ClassName) ->
{RefMod,Def} = get_referenced_type(S,ClassName),
case Def of
@@ -579,8 +500,7 @@ check_class(S,ClassName) ->
false ->
Name=ClassName#'Externaltypereference'.type,
store_class(S,idle,ClassDef,Name),
-% NewS = S#state{mname=RefMod,type=Def,tname=Name},
- NewS = update_state(S#state{type=Def,tname=Name},RefMod),
+ NewS = update_state(S#state{tname=Name}, RefMod),
CheckedTS = check_class(NewS,ClassDef#classdef.typespec),
store_class(S,true,ClassDef#classdef{typespec=CheckedTS},Name),
CheckedTS
@@ -594,11 +514,20 @@ check_class(S,ClassName) ->
end
end.
+check_objectclass(S, #objectclass{fields=Fs0,syntax=Syntax0}=C) ->
+ Fs = check_class_fields(S, Fs0),
+ case Syntax0 of
+ {'WITH SYNTAX',Syntax1} ->
+ Syntax = preprocess_syntax(S, Syntax1, Fs),
+ C#objectclass{fields=Fs,syntax={preprocessed_syntax,Syntax}};
+ _ ->
+ C#objectclass{fields=Fs}
+ end.
+
instantiate_pclass(S=#state{parameters=_OldArgs},PClassDef,Params) ->
#ptypedef{args=Args,typespec=Type} = PClassDef,
MatchedArgs = match_args(S,Args, Params, []),
-% NewS = S#state{type=Type,parameters=MatchedArgs++OldArgs,abscomppath=[]},
- NewS = S#state{type=Type,parameters=MatchedArgs,abscomppath=[]},
+ NewS = S#state{parameters=MatchedArgs,abscomppath=[]},
check_class(NewS,#classdef{name=S#state.tname,typespec=Type}).
store_class(S,Mode,ClassDef,ClassName) ->
@@ -613,6 +542,12 @@ check_class_fields(S,[F|Fields],Acc) ->
case element(1,F) of
fixedtypevaluefield ->
{_,Name,Type,Unique,OSpec} = F,
+ case {Unique,OSpec} of
+ {'UNIQUE',{'DEFAULT',_}} ->
+ asn1_error(S, {unique_and_default,Name});
+ {_,_} ->
+ ok
+ end,
RefType = check_type(S,#typedef{typespec=Type},Type),
{fixedtypevaluefield,Name,RefType,Unique,OSpec};
object_or_fixedtypevalue_field ->
@@ -621,7 +556,7 @@ check_class_fields(S,[F|Fields],Acc) ->
Cat =
case asn1ct_gen:type(asn1ct_gen:get_inner(Type2#type.def)) of
Def when is_record(Def,'Externaltypereference') ->
- {_,D} = get_referenced_type(S,Def),
+ {_,D} = get_referenced_type(S, Def, true),
D;
{undefined,user} ->
%% neither of {primitive,bif} or {constructed,bif}
@@ -644,18 +579,14 @@ check_class_fields(S,[F|Fields],Acc) ->
objectset_or_fixedtypevalueset_field ->
{_,Name,Type,OSpec} = F,
RefType =
- case (catch check_type(S,#typedef{typespec=Type},Type)) of
- {asn1_class,_ClassDef} ->
+ try check_type(S,#typedef{typespec=Type},Type) of
+ #type{} = CheckedType ->
+ CheckedType
+ catch {asn1_class,_ClassDef} ->
case if_current_checked_type(S,Type) of
- true ->
- Type#type.def;
- _ ->
- check_class(S,Type)
- end;
- CheckedType when is_record(CheckedType,type) ->
- CheckedType;
- _ ->
- error({class,"internal error, check_class_fields",S})
+ true -> Type#type.def;
+ _ -> check_class(S,Type)
+ end
end,
if
is_record(RefType,'Externaltypereference') ->
@@ -733,38 +664,34 @@ check_pobjectset(S,PObjSet) ->
PObjSet
end.
+-record(osi, %Object set information.
+ {st,
+ classref,
+ uniq,
+ ext
+ }).
+
check_object(_S,ObjDef,ObjSpec) when (ObjDef#typedef.checked == true) ->
ObjSpec;
check_object(S,_ObjDef,#'Object'{classname=ClassRef,def=ObjectDef}) ->
?dbg("check_object ~p~n",[ObjectDef]),
-%% io:format("check_object,object: ~p~n",[ObjectDef]),
-% {MName,_ClassDef} = get_referenced_type(S,ClassRef),
- NewClassRef = check_externaltypereference(S,ClassRef),
- ClassDef =
- case get_referenced_type(S,ClassRef) of
- {MName,ClDef=#classdef{checked=false}} ->
- NewState = update_state(S#state{type=ClDef,
- tname=ClassRef#'Externaltypereference'.type},MName),
- ObjClass=
- check_class(NewState,ClDef),
- #classdef{checked=true,
- typespec=ObjClass};
- {_,_ClDef} when is_record(_ClDef,classdef) ->
- _ClDef;
- {MName,_TDef=#typedef{checked=false,pos=Pos,
- name=_TName,typespec=TS}} ->
- ClDef = #classdef{pos=Pos,name=_TName,typespec=TS},
- NewState = update_state(S#state{type=_TDef,
- tname=ClassRef#'Externaltypereference'.type},MName),
- ObjClass =
- check_class(NewState,ClDef),
- ClDef#classdef{checked=true,typespec=ObjClass};
- {_,_ClDef} ->
- _ClDef
+ _ = check_externaltypereference(S,ClassRef),
+ {ClassDef, NewClassRef} =
+ case get_referenced_type(S, ClassRef, true) of
+ {MName,#classdef{checked=false, name=CLName}=ClDef} ->
+ Type = ClassRef#'Externaltypereference'.type,
+ NewState = update_state(S#state{tname=Type}, MName),
+ ObjClass = check_class(NewState, ClDef),
+ {ClDef#classdef{checked=true, typespec=ObjClass},
+ #'Externaltypereference'{module=MName, type=CLName}};
+ {MName,#classdef{name=CLName}=ClDef} ->
+ {ClDef, #'Externaltypereference'{module=MName, type=CLName}};
+ _ ->
+ asn1_error(S, illegal_object)
end,
NewObj =
case ObjectDef of
- Def when is_tuple(Def), (element(1,Def)==object) ->
+ {object,_,_}=Def ->
NewSettingList = check_objectdefn(S,Def,ClassDef),
#'Object'{def=NewSettingList};
{po,{object,DefObj},ArgsList} ->
@@ -778,425 +705,287 @@ check_object(S,_ObjDef,#'Object'{classname=ClassRef,def=ObjectDef}) ->
instantiate_po(S,ClassDef,Object,ArgList);
#'Externalvaluereference'{} ->
{_,Object} = get_referenced_type(S,ObjectDef),
- check_object(S,Object,Object#typedef.typespec);
+ check_object(S, Object, object_to_check(S, Object));
[] ->
- %% An object with no fields. All class fields must be
- %% optional or default. Check that all fields in
- %% class are 'OPTIONAL' or 'DEFAULT'
- class_fields_optional_check(S,ClassDef),
- #'Object'{def={object,defaultsyntax,[]}};
- _ ->
- exit({error,{no_object,ObjectDef},S})
+ %% An object with no fields (parsed as a value).
+ Def = {object,defaultsyntax,[]},
+ NewSettingList = check_objectdefn(S, Def, ClassDef),
+ #'Object'{def=NewSettingList};
+ _ ->
+ asn1_error(S, illegal_object)
end,
- Gen = gen_incl(S,NewObj#'Object'.def,
- (ClassDef#classdef.typespec)#objectclass.fields),
+ Fields = (ClassDef#classdef.typespec)#objectclass.fields,
+ Gen = gen_incl(S,NewObj#'Object'.def, Fields),
NewObj#'Object'{classname=NewClassRef,gen=Gen};
-
-
-check_object(S,
- _ObjSetDef,
- ObjSet=#'ObjectSet'{class=ClassRef}) ->
-%% io:format("check_object,SET: ~p~n",[ObjSet#'ObjectSet'.set]),
- ?dbg("check_object set: ~p~n",[ObjSet#'ObjectSet'.set]),
- {_,ClassDef} = get_referenced_type(S,ClassRef),
- NewClassRef = check_externaltypereference(S,ClassRef),
- {UniqueFieldName,UniqueInfo} =
- case (catch get_unique_fieldname(S,ClassDef)) of
- {error,'__undefined_',_} ->
- {{unique,undefined},{unique,undefined}};
- {asn1,Msg,_} -> error({class,Msg,S});
- {'EXIT',Msg} -> error({class,{internal_error,Msg},S});
+check_object(S, _, #'ObjectSet'{class=ClassRef0,set=Set0}=ObjSet0) ->
+ {_,ClassDef} = get_referenced_type(S, ClassRef0),
+ ClassRef = check_externaltypereference(S, ClassRef0),
+ {UniqueFieldName,UniqueInfo} =
+ case get_unique_fieldname(S, ClassDef) of
+ no_unique -> {{unique,undefined},{unique,undefined}};
Other -> {element(1,Other),Other}
end,
- NewObjSet=
- case prepare_objset(ObjSet#'ObjectSet'.set) of
- {set,SET,EXT} ->
- CheckedSet = check_object_list(S,NewClassRef,SET),
- NewSet = get_unique_valuelist(S,CheckedSet,UniqueInfo),
- ObjSet#'ObjectSet'{uniquefname=UniqueFieldName,
- set=extensionmark(NewSet,EXT)};
-
- {'SingleValue',ERef = #'Externalvaluereference'{}} ->
- {RefedMod,ObjDef} = get_referenced_type(S,ERef),
- #'Object'{def=CheckedObj} =
- check_object(S,ObjDef,ObjDef#typedef.typespec),
-
- NewSet = get_unique_valuelist(S,[{{RefedMod,get_datastr_name(ObjDef)},
- CheckedObj}],
- UniqueInfo),
- ObjSet#'ObjectSet'{uniquefname=UniqueFieldName,
- set=NewSet};
- ['EXTENSIONMARK'] ->
- ObjSet#'ObjectSet'{uniquefname=UniqueFieldName,
- set=['EXTENSIONMARK']};
-
- OSref when is_record(OSref,'Externaltypereference') ->
- {_,OS=#typedef{typespec=OSdef}} = get_referenced_type(S,OSref),
- check_object(S,OS,OSdef);
-
- {Type,{'EXCEPT',Exclusion}} when is_record(Type,type) ->
- {_,TDef} = get_referenced_type(S,Type#type.def),
- OS = TDef#typedef.typespec,
- NewSet = reduce_objectset(OS#'ObjectSet'.set,Exclusion),
- NewOS = OS#'ObjectSet'{set=NewSet},
- check_object(S,TDef#typedef{typespec=NewOS},
- NewOS);
- #type{def={pt,DefinedObjSet,ParamList}} ->
- {_,PObjSetDef} = get_referenced_type(S,DefinedObjSet),
- NewParamList =
- [match_parameters(S,TmpParam,S#state.parameters)||
- TmpParam <- ParamList],
- instantiate_pos(S,ClassRef,PObjSetDef,NewParamList);
-
- %% actually this is an ObjectSetFromObjects construct, it
- %% is when the object set is retrieved from an object
- %% field.
- #type{def=#'ObjectClassFieldType'{classname=ObjName,
- fieldname=FieldName}} ->
- {RefedObjMod,TDef} = get_referenced_type(S,ObjName),
- OS=TDef#typedef.typespec,
- %% should get the right object set here. Get the field
- %% FieldName out of the object set OS of class
- %% OS#'ObjectSet'.class
- OS2=check_object(S,TDef,OS),
- NewSet=object_set_from_objects(S,RefedObjMod,FieldName,OS2),
- ObjSet#'ObjectSet'{uniquefname=UniqueFieldName,
- set=NewSet};
- {'ObjectSetFromObjects',{_,_,ObjName},FieldName} ->
- {RefedObjMod,TDef} = get_referenced_type(S,ObjName),
- OS=TDef#typedef.typespec,
- %% should get the right object set here. Get the field
- %% FieldName out of the object set OS of class
- %% OS#'ObjectSet'.class
- OS2=check_object(S,TDef,OS),
- NewSet=object_set_from_objects(S,RefedObjMod,FieldName,OS2),
- ObjSet#'ObjectSet'{uniquefname=UniqueFieldName,
- set=NewSet};
- {'ObjectSetFromObjects',{_,ObjName},FieldName} ->
- %% This is a ObjectSetFromObjects, i.e.
- %% ObjectSetFromObjects ::= ReferencedObjects "." FieldName
- %% with a defined object as ReferencedObjects. And
- %% the FieldName of the Class (object) contains an object set.
- {RefedObjMod,TDef} = get_referenced_type(S,ObjName),
- O1 = TDef#typedef.typespec,
- O2 = check_object(S,TDef,O1),
- NewSet = object_set_from_objects(S,RefedObjMod,FieldName,O2),
- OS2=ObjSet#'ObjectSet'{uniquefname=UniqueFieldName,
- set=NewSet},
- %%io:format("ObjectSet: ~p~n",[OS2]),
- OS2;
- {pos,{objectset,_,DefinedObjSet},Params} ->
- {_,PObjSetDef} = get_referenced_type(S,DefinedObjSet),
- NewParamList =
- [match_parameters(S,TmpParam,S#state.parameters)||
- TmpParam <- Params],
- instantiate_pos(S,ClassRef,PObjSetDef,NewParamList);
- Unknown ->
- exit({error,{unknown_object_set,Unknown},S})
- end,
- NewSet2 = remove_duplicate_objects(NewObjSet#'ObjectSet'.set),
- NewObjSet2 = NewObjSet#'ObjectSet'{set=NewSet2},
- Gen = gen_incl_set(S,NewObjSet2#'ObjectSet'.set,
- ClassDef),
- ?dbg("check_object done~n",[]),
- NewObjSet2#'ObjectSet'{class=NewClassRef,gen=Gen}.
+ OSI0 = #osi{st=S,classref=ClassRef,uniq=UniqueInfo,ext=false},
+ {Set1,OSI1} = if
+ is_list(Set0) ->
+ check_object_set_list(Set0, OSI0);
+ true ->
+ check_object_set(Set0, OSI0)
+ end,
+ Ext = case Set1 of
+ [] ->
+ %% FIXME: X420 does not compile unless we force
+ %% empty sets to be extensible. There should be
+ %% a better way.
+ true;
+ [_|_] ->
+ OSI1#osi.ext
+ end,
+ Set2 = remove_duplicate_objects(S, Set1),
+ Set = case Ext of
+ false -> Set2;
+ true -> Set2 ++ ['EXTENSIONMARK']
+ end,
+ ObjSet = ObjSet0#'ObjectSet'{uniquefname=UniqueFieldName,set=Set},
+ Gen = gen_incl_set(S, Set, ClassDef),
+ ObjSet#'ObjectSet'{class=ClassRef,gen=Gen}.
+
+check_object_set({element_set,Root0,Ext0}, OSI0) ->
+ OSI = case Ext0 of
+ none -> OSI0;
+ _ -> OSI0#osi{ext=true}
+ end,
+ case {Root0,Ext0} of
+ {empty,empty} -> {[],OSI};
+ {empty,Ext} -> check_object_set(Ext, OSI);
+ {Root,none} -> check_object_set(Root, OSI);
+ {Root,empty} -> check_object_set(Root, OSI);
+ {Root,Ext} -> check_object_set_list([Root,Ext], OSI)
+ end;
+check_object_set(#'Externaltypereference'{}=Ref, #osi{st=S}=OSI) ->
+ {_,#typedef{typespec=OSdef}=OS} = get_referenced_type(S, Ref),
+ ObjectSet = check_object(S, OS, OSdef),
+ check_object_set_objset(ObjectSet, OSI);
+check_object_set(#'Externalvaluereference'{}=Ref, #osi{st=S}=OSI) ->
+ {RefedMod,ObjName,#'Object'{def=Def}} = check_referenced_object(S, Ref),
+ ObjList = check_object_set_mk(RefedMod, ObjName, Def, OSI),
+ {ObjList,OSI};
+check_object_set({'EXCEPT',Incl0,Excl0}, OSI) ->
+ {Incl1,_} = check_object_set(Incl0, OSI),
+ {Excl1,_} = check_object_set(Excl0, OSI),
+ Exclude = sofs:set([N || {N,_} <- Excl1], [name]),
+ Incl2 = [{Name,Obj} || {Name,_,_}=Obj <- Incl1],
+ Incl3 = sofs:relation(Incl2, [{name,object}]),
+ Incl4 = sofs:drestriction(Incl3, Exclude),
+ Incl5 = sofs:to_external(Incl4),
+ Incl = [Obj || {_,Obj} <- Incl5],
+ {Incl,OSI};
+check_object_set({object,_,_}=Obj0, OSI) ->
+ #osi{st=S,classref=ClassRef} = OSI,
+ #'Object'{def=Def} =
+ check_object(S, #typedef{typespec=Obj0},
+ #'Object'{classname=ClassRef,def=Obj0}),
+ ObjList = check_object_set_mk(Def, OSI),
+ {ObjList,OSI};
+check_object_set(#'ObjectClassFieldType'{classname=ObjName,
+ fieldname=FieldNames},
+ #osi{st=S}=OSI) ->
+ Set = check_ObjectSetFromObjects(S, ObjName, FieldNames),
+ check_object_set_objset_list(Set, OSI);
+check_object_set({'ObjectSetFromObjects',Obj,FieldNames}, #osi{st=S}=OSI) ->
+ ObjName = element(tuple_size(Obj), Obj),
+ Set = check_ObjectSetFromObjects(S, ObjName, FieldNames),
+ check_object_set_objset_list(Set, OSI);
+check_object_set({pt,DefinedObjSet,ParamList0}, OSI) ->
+ #osi{st=S,classref=ClassRef} = OSI,
+ {_,PObjSetDef} = get_referenced_type(S, DefinedObjSet),
+ ParamList = match_parameters(S, ParamList0),
+ ObjectSet = instantiate_pos(S, ClassRef, PObjSetDef, ParamList),
+ check_object_set_objset(ObjectSet, OSI);
+check_object_set({pos,{objectset,_,DefinedObjSet},Params0}, OSI) ->
+ #osi{st=S,classref=ClassRef} = OSI,
+ {_,PObjSetDef} = get_referenced_type(S, DefinedObjSet),
+ Params = match_parameters(S, Params0),
+ ObjectSet = instantiate_pos(S, ClassRef, PObjSetDef, Params),
+ check_object_set_objset(ObjectSet, OSI);
+check_object_set({pv,{simpledefinedvalue,DefinedObject},Params}=PV, OSI) ->
+ #osi{st=S,classref=ClassRef} = OSI,
+ Args = match_parameters(S, Params),
+ #'Object'{def=Def} =
+ check_object(S, PV,
+ #'Object'{classname=ClassRef ,
+ def={po,{object,DefinedObject},Args}}),
+ ObjList = check_object_set_mk(Def, OSI),
+ {ObjList,OSI};
+check_object_set({'SingleValue',Val}, OSI) ->
+ check_object_set(Val, OSI);
+check_object_set({'ValueFromObject',{object,Object},FieldNames}, OSI) ->
+ #osi{st=S} = OSI,
+ case extract_field(S, Object, FieldNames) of
+ #'Object'{def=Def} ->
+ ObjList = check_object_set_mk(Def, OSI),
+ {ObjList,OSI};
+ _ ->
+ asn1_error(S, illegal_object)
+ end;
+check_object_set(#type{def=Def}, OSI) ->
+ check_object_set(Def, OSI);
+check_object_set({union,A0,B0}, OSI0) ->
+ {A,OSI1} = check_object_set(A0, OSI0),
+ {B,OSI} = check_object_set(B0, OSI1),
+ {A++B,OSI}.
+
+check_object_set_list([H|T], OSI0) ->
+ {Set0,OSI1} = check_object_set(H, OSI0),
+ {Set1,OSI2} = check_object_set_list(T, OSI1),
+ {Set0++Set1,OSI2};
+check_object_set_list([], OSI) ->
+ {[],OSI}.
+
+check_object_set_objset(#'ObjectSet'{set=Set}, OSI) ->
+ check_object_set_objset_list(Set, OSI).
+
+check_object_set_objset_list(Set, OSI) ->
+ check_object_set_objset_list_1(Set, OSI, []).
+
+check_object_set_objset_list_1(['EXTENSIONMARK'|T], OSI, Acc) ->
+ check_object_set_objset_list_1(T, OSI#osi{ext=true}, Acc);
+check_object_set_objset_list_1([H|T], OSI, Acc) ->
+ check_object_set_objset_list_1(T, OSI, [H|Acc]);
+check_object_set_objset_list_1([], OSI, Acc) ->
+ {Acc,OSI}.
+
+check_object_set_mk(Fields, OSI) ->
+ check_object_set_mk(no_mod, no_name, Fields, OSI).
+
+check_object_set_mk(M, N, Def, #osi{uniq={unique,undefined}}) ->
+ {_,_,Fields} = Def,
+ [{{M,N},no_unique_value,Fields}];
+check_object_set_mk(M, N, Def, #osi{uniq={UniqField,_}}) ->
+ {_,_,Fields} = Def,
+ case lists:keyfind(UniqField, 1, Fields) of
+ {UniqField,#valuedef{value=Val}} ->
+ [{{M,N},Val,Fields}];
+ false ->
+ case Fields of
+ [{_,#typedef{typespec=#'ObjectSet'{set=['EXTENSIONMARK']}}}] ->
+ %% FIXME: If object is missing the unique field and
+ %% only contains a reference to an empty object set,
+ %% we will remove the entire object as a workaround
+ %% to get X420 to compile. There should be a better
+ %% way.
+ [];
+ _ ->
+ [{{M,N},no_unique_value,Fields}]
+ end
+ end.
%% remove_duplicate_objects/1 remove duplicates of objects.
%% For instance may Set contain objects of same class from
%% different object sets that in fact might be duplicates.
-remove_duplicate_objects(Set) when is_list(Set) ->
- Pred = fun({A,B,_},{A,C,_}) when B =< C -> true;
- ({A,_,_},{B,_,_}) when A < B -> true;
- ('EXTENSIONMARK','EXTENSIONMARK') -> true;
- (T,A) when is_tuple(T),is_atom(A) -> true;% EXTENSIONMARK last in list
- (_,_) -> false
- end,
- lists:usort(Pred,Set).
+remove_duplicate_objects(S, Set0) when is_list(Set0) ->
+ Set1 = [{Id,Orig} || {_,Id,_}=Orig <- Set0],
+ Set2 = sofs:relation(Set1),
+ Set3 = sofs:relation_to_family(Set2),
+ Set = sofs:to_external(Set3),
+ remove_duplicate_objects_1(S, Set).
+
+remove_duplicate_objects_1(S, [{no_unique_value,Objs}|T]) ->
+ Objs ++ remove_duplicate_objects_1(S, T);
+remove_duplicate_objects_1(S, [{_,[_]=Objs}|T]) ->
+ Objs ++ remove_duplicate_objects_1(S, T);
+remove_duplicate_objects_1(S, [{Id,[_|_]=Objs}|T]) ->
+ MakeSortable = fun(What) -> sortable_type(S, What) end,
+ Tagged = order_tag_set(Objs, MakeSortable),
+ case lists:ukeysort(1, Tagged) of
+ [{_,Obj}] ->
+ [Obj|remove_duplicate_objects_1(S, T)];
+ [_|_] ->
+ asn1_error(S, {non_unique_object,Id})
+ end;
+remove_duplicate_objects_1(_, []) ->
+ [].
-%%
-extensionmark(L,true) ->
- case lists:member('EXTENSIONMARK',L) of
- true -> L;
- _ -> L ++ ['EXTENSIONMARK']
+order_tag_set([{_, _, Fields}=Orig|Fs], Fun) ->
+ Pair = {[{FId, traverse(F, Fun)} || {FId, F} <- Fields], Orig},
+ [Pair|order_tag_set(Fs, Fun)];
+order_tag_set([], _) -> [].
+
+sortable_type(S, #'Externaltypereference'{}=ERef) ->
+ try get_referenced_type(S, ERef) of
+ {_,#typedef{}=OI} ->
+ OI#typedef{pos=undefined,name=undefined}
+ catch
+ _:_ ->
+ ERef
end;
-extensionmark(L,_) ->
- L.
+sortable_type(_, #typedef{}=TD) ->
+ asn1ct:unset_pos_mod(TD#typedef{name=undefined});
+sortable_type(_, Type) ->
+ asn1ct:unset_pos_mod(Type).
+
+traverse(Structure0, Fun) ->
+ Structure = Fun(Structure0),
+ traverse_1(Structure, Fun).
+
+traverse_1(#typedef{typespec=TS0} = TD, Fun) ->
+ TS = traverse(TS0, Fun),
+ TD#typedef{typespec=TS};
+traverse_1(#valuedef{type=TS0} = VD, Fun) ->
+ TS = traverse(TS0, Fun),
+ VD#valuedef{type=TS};
+traverse_1(#type{def=TS0} = TD, Fun) ->
+ TS = traverse(TS0, Fun),
+ TD#type{def=TS};
+traverse_1(#'SEQUENCE'{components=Cs0} = Seq, Fun) ->
+ Cs = traverse_seq_set(Cs0, Fun),
+ Seq#'SEQUENCE'{components=Cs};
+traverse_1({'SEQUENCE OF',Type0}, Fun) ->
+ Type = traverse(Type0, Fun),
+ {'SEQUENCE OF',Type};
+traverse_1({'SET OF',Type0}, Fun) ->
+ Type = traverse(Type0, Fun),
+ {'SET OF',Type};
+traverse_1(#'SET'{components=Cs0} = Set, Fun) ->
+ Cs = traverse_seq_set(Cs0, Fun),
+ Set#'SET'{components=Cs};
+traverse_1({'CHOICE', Cs0}, Fun) ->
+ Cs = traverse_seq_set(Cs0, Fun),
+ {'CHOICE', Cs};
+traverse_1(Leaf, _) ->
+ Leaf.
+
+traverse_seq_set(List, Fun) when is_list(List) ->
+ traverse_seq_set_1(List, Fun);
+traverse_seq_set({Set, Ext}, Fun) ->
+ {traverse_seq_set_1(Set, Fun), traverse_seq_set_1(Ext, Fun)};
+traverse_seq_set({Set1, Set2, Set3}, Fun) ->
+ {traverse_seq_set_1(Set1, Fun),
+ traverse_seq_set_1(Set2, Fun),
+ traverse_seq_set_1(Set3, Fun)}.
+
+traverse_seq_set_1([#'ComponentType'{} = CT0|Cs], Fun) ->
+ CT = #'ComponentType'{typespec=TS0} = Fun(CT0),
+ TS = traverse(TS0, Fun),
+ [CT#'ComponentType'{typespec=TS}|traverse_seq_set_1(Cs, Fun)];
+traverse_seq_set_1([{'COMPONENTS OF', _} = CO0|Cs], Fun) ->
+ {'COMPONENTS OF', TS0} = Fun(CO0),
+ TS = traverse(TS0, Fun),
+ [{'COMPONENTS OF', TS}|traverse_seq_set_1(Cs, Fun)];
+traverse_seq_set_1([], _) ->
+ [].
-object_to_check(#typedef{typespec=ObjDef}) ->
+object_to_check(_, #typedef{typespec=ObjDef}) ->
ObjDef;
-object_to_check(#valuedef{type=ClassName,value=ObjectRef}) ->
+object_to_check(S, #valuedef{type=Class,value=ObjectRef}) ->
%% If the object definition is parsed as an object the ClassName
- %% is parsed as a type
- #'Object'{classname=ClassName#type.def,def=ObjectRef}.
-
-prepare_objset({'SingleValue',Set}) when is_list(Set) ->
- {set,Set,false};
-prepare_objset(L=['EXTENSIONMARK']) ->
- L;
-prepare_objset(Set) when is_list(Set) ->
- {set,Set,false};
-prepare_objset({{'SingleValue',Set},Ext}) ->
- {set,merge_sets(Set,Ext),true};
-%%prepare_objset({Set,Ext}) when is_list(Set),is_list(Ext) ->
-%% {set,lists:append([Set,Ext]),true};
-prepare_objset({Set,Ext}) when is_list(Set) ->
- {set,merge_sets(Set,Ext),true};
-prepare_objset({{object,definedsyntax,_ObjFields}=Set,Ext}) ->
- {set,merge_sets(Set, Ext),true};
-prepare_objset(ObjDef={object,definedsyntax,_ObjFields}) ->
- {set,[ObjDef],false};
-prepare_objset({ObjDef=#type{},Ext}) when is_list(Ext) ->
- {set,[ObjDef|Ext],true};
-prepare_objset({#type{}=Type,#type{}=Ext}) ->
- {set,[Type,Ext],true};
-prepare_objset(Ret) ->
- Ret.
-
-class_fields_optional_check(S,#classdef{typespec=ClassSpec}) ->
- Fields = ClassSpec#objectclass.fields,
- class_fields_optional_check1(S,Fields).
-
-class_fields_optional_check1(_S,[]) ->
- ok;
-class_fields_optional_check1(S,[{typefield,_,'OPTIONAL'}|Rest]) ->
- class_fields_optional_check1(S,Rest);
-class_fields_optional_check1(S,[{fixedtypevaluefield,_,_,_,'OPTIONAL'}|Rest]) ->
- class_fields_optional_check1(S,Rest);
-class_fields_optional_check1(S,[{fixedtypevaluesetfield,_,_,'OPTIONAL'}|Rest]) ->
- class_fields_optional_check1(S,Rest);
-class_fields_optional_check1(S,[{objectfield,_,_,_,'OPTIONAL'}|Rest]) ->
- class_fields_optional_check1(S,Rest);
-class_fields_optional_check1(S,[{objectsetfield,_,_,'OPTIONAL'}|Rest]) ->
- class_fields_optional_check1(S,Rest).
-
-%% ObjectSetFromObjects functionality
-
-%% The fieldname is a list of field names.They may be objects or
-%% object sets. If ObjectSet is an object set the resulting object set
-%% is the union of object sets if the last field name is an object
-%% set. If the last field is an object the resulting object set is
-%% the set of objects in ObjectSet.
-object_set_from_objects(S,RefedObjMod,FieldName,ObjectSet) ->
- object_set_from_objects(S,RefedObjMod,FieldName,ObjectSet,[]).
-object_set_from_objects(S,RefedObjMod,FieldName,ObjectSet,InterSect)
- when is_record(ObjectSet,'ObjectSet') ->
- #'ObjectSet'{class=Cl,set=Set} = ObjectSet,
- {_,ClassDef} = get_referenced_type(S,Cl),
- object_set_from_objects(S,RefedObjMod,ClassDef,FieldName,Set,InterSect,[]);
-object_set_from_objects(S,RefedObjMod,FieldName,Object,InterSect)
- when is_record(Object,'Object') ->
- #'Object'{classname=Cl,def=Def}=Object,
- object_set_from_objects(S,RefedObjMod,Cl,FieldName,[Def],InterSect,[]).
-object_set_from_objects(S,RefedObjMod,ClassDef,FieldName,['EXTENSIONMARK'|Os],
- InterSect,Acc) ->
- object_set_from_objects(S,RefedObjMod,ClassDef,FieldName,Os,InterSect,%%Acc);
- ['EXTENSIONMARK'|Acc]);
-object_set_from_objects(S,RefedObjMod,ClassDef,FieldName,[O|Os],InterSect,Acc) ->
- case object_set_from_objects2(S,mod_of_obj(RefedObjMod,element(1,O)),
- ClassDef,FieldName,element(3,O),InterSect) of
- ObjS when is_list(ObjS) ->
- object_set_from_objects(S,RefedObjMod,ClassDef,FieldName,Os,InterSect,ObjS++Acc);
- Obj ->
- object_set_from_objects(S,RefedObjMod,ClassDef,FieldName,Os,InterSect,[Obj|Acc])
- end;
-object_set_from_objects(_S,_RefedObjMod,_ClassDef,_FieldName,[],InterSect,Acc) ->
- %% For instance may Acc contain objects of same class from
- %% different object sets that in fact might be duplicates.
- remove_duplicate_objects(osfo_intersection(InterSect,Acc)).
-%% Acc.
-object_set_from_objects2(S,RefedObjMod,ClassDef,[{valuefieldreference,OName}],
- Fields,_InterSect) ->
- %% this is an object
- case lists:keysearch(OName,1,Fields) of
- {value,{_,TDef}} ->
- mk_object_set_from_object(S,RefedObjMod,TDef,ClassDef);
- _ ->
- [] % it may be an absent optional field
- end;
-object_set_from_objects2(S,RefedObjMod,ClassDef,[{typefieldreference,OSName}],
- Fields,_InterSect) ->
- %% this is an object set
- case lists:keysearch(OSName,1,Fields) of
- {value,{_,TDef}} ->
- case TDef#typedef.typespec of
- #'ObjectSet'{class=_NextClName,set=NextSet} ->%% = TDef#typedef.typespec,
- NextSet;
- #'Object'{def=_ObjDef} ->
- mk_object_set_from_object(S,RefedObjMod,TDef,ClassDef)
-%% ObjDef
- %% error({error,{internal,unexpected_object,TDef}})
- end;
- _ ->
- [] % it may be an absent optional field
- end;
-object_set_from_objects2(S,RefedObjMod,_ClassDef,[{valuefieldreference,OName}|Rest],
- Fields,InterSect) ->
- %% this is an object
- case lists:keysearch(OName,1,Fields) of
- {value,{_,TDef}} ->
- #'Object'{classname=NextClName,def=ODef}=TDef#typedef.typespec,
- {_,_,NextFields}=ODef,
- {_,NextClass} = get_referenced_type(S,NextClName),
- object_set_from_objects2(S,RefedObjMod,NextClass,Rest,NextFields,InterSect);
- _ ->
- []
- end;
-object_set_from_objects2(S,RefedObjMod,_ClassDef,[{typefieldreference,OSName}|Rest],
- Fields,InterSect) ->
- %% this is an object set
- Next = {NextClName,NextSet} =
- case lists:keysearch(OSName,1,Fields) of
- {value,{_,TDef}} when is_record(TDef,'ObjectSet') ->
- #'ObjectSet'{class=NextClN,set=NextS} = TDef,
- {NextClN,NextS};
- {value,{_,#typedef{typespec=OS}}} ->
- %% objectsets in defined syntax will come here as typedef{}
- %% #'ObjectSet'{class=NextClN,set=NextS} = OS,
- case OS of
- #'ObjectSet'{class=NextClN,set=NextS} ->
- {NextClN,NextS};
- #'Object'{classname=NextClN,def=NextDef} ->
- {NextClN,[NextDef]}
- end;
+ %% is parsed as a type.
+ case Class of
+ #type{def=#'Externaltypereference'{}=Def} ->
+ #'Object'{classname=Def,def=ObjectRef};
_ ->
- {[],[]}
- end,
- case Next of
- {[],[]} ->
- [];
- _ ->
- {_,NextClass} = get_referenced_type(S,NextClName),
- object_set_from_objects(S,RefedObjMod,NextClass,Rest,NextSet,InterSect,[])
- end.
-
-mk_object_set_from_object(S,RefedObjMod,TDef,Class) ->
- #'Object'{classname=_NextClName,def=ODef} = TDef#typedef.typespec,
- {_,_,NextFields}=ODef,
-
- UniqueFieldName =
- case (catch get_unique_fieldname(S,Class)) of
- {error,'__undefined_',_} -> {unique,undefined};
- {asn1,Msg,_} -> error({class,Msg,S});
- {'EXIT',Msg} -> error({class,{internal_error,Msg},S});
- {Other,_} -> Other
- end,
- VDef = get_unique_value(S,NextFields,UniqueFieldName),
- %% XXXXXXXXXXX
- case VDef of
- [] ->
- ['EXTENSIONMARK'];
- _ ->
- {{RefedObjMod,get_datastr_name(TDef)},VDef,NextFields}
+ asn1_error(S, illegal_object)
end.
-
-
-mod_of_obj(_RefedObjMod,{NewMod,ObjName})
- when is_atom(NewMod),is_atom(ObjName) ->
- NewMod;
-mod_of_obj(RefedObjMod,_) ->
- RefedObjMod.
-
-
-merge_sets(Root,{'SingleValue',Ext}) ->
- merge_sets(Root,Ext);
-merge_sets(Root,Ext) when is_list(Root),is_list(Ext) ->
- Root ++ Ext;
-merge_sets(Root,Ext) when is_list(Ext) ->
- [Root|Ext];
-merge_sets(Root,Ext) when is_list(Root) ->
- Root++[Ext];
-merge_sets(Root,Ext) ->
- [Root]++[Ext].
-
-reduce_objectset(ObjectSet,Exclusion) ->
- case Exclusion of
- {'SingleValue',#'Externalvaluereference'{value=Name}} ->
- case lists:keysearch(Name,1,ObjectSet) of
- {value,El} ->
- lists:subtract(ObjectSet,[El]);
- _ ->
- ObjectSet
- end
- end.
-
-%% Checks a list of objects or object sets and returns a list of selected
-%% information for the code generation.
-check_object_list(S,ClassRef,ObjectList) ->
- check_object_list(S,ClassRef,ObjectList,[]).
-
-check_object_list(S,ClassRef,[ObjOrSet|Objs],Acc) ->
- ?dbg("check_object_list: ~p~n",[ObjOrSet]),
- case ObjOrSet of
- ObjDef when is_tuple(ObjDef),(element(1,ObjDef)==object) ->
- Def =
- check_object(S,#typedef{typespec=ObjDef},
-% #'Object'{classname={objectclassname,ClassRef},
- #'Object'{classname=ClassRef,
- def=ObjDef}),
- check_object_list(S,ClassRef,Objs,[{{no_mod,no_name},Def#'Object'.def}|Acc]);
- {'SingleValue',Ref = #'Externalvaluereference'{}} ->
- ?dbg("{SingleValue,Externalvaluereference}~n",[]),
- {RefedMod,ObjName,
- #'Object'{def=Def}} = check_referenced_object(S,Ref),
- check_object_list(S,ClassRef,Objs,[{{RefedMod,ObjName},Def}|Acc]);
- ObjRef when is_record(ObjRef,'Externalvaluereference') ->
- ?dbg("Externalvaluereference~n",[]),
- {RefedMod,ObjName,
- #'Object'{def=Def}} = check_referenced_object(S,ObjRef),
- check_object_list(S,ClassRef,Objs,[{{RefedMod,ObjName},Def}|Acc]);
- {'ValueFromObject',{_,Object},FieldName} ->
- {_,Def} = get_referenced_type(S,Object),
- TypeDef = get_fieldname_element(S,Def,FieldName),
- (TypeDef#typedef.typespec)#'ObjectSet'.set;
- ObjSet when is_record(ObjSet,type) ->
- ObjSetDef =
- case ObjSet#type.def of
- Ref when is_record(Ref,'Externaltypereference') ->
- {_,D} = get_referenced_type(S,ObjSet#type.def),
- D;
- Other ->
- throw({asn1_error,{'unknown objecset',Other,S}})
- end,
- #'ObjectSet'{set=ObjectsInSet} =
- check_object(S,ObjSetDef,ObjSetDef#typedef.typespec),
- AccList = transform_set_to_object_list(ObjectsInSet,[]),
- check_object_list(S,ClassRef,Objs,AccList++Acc);
- union ->
- check_object_list(S,ClassRef,Objs,Acc);
- {pos,{objectset,_,DefinedObjectSet},Params} ->
- OSDef = #type{def={pt,DefinedObjectSet,Params}},
- #'ObjectSet'{set=Set} =
- check_object(S,ObjOrSet,#'ObjectSet'{class=ClassRef,
- set=OSDef}),
- check_object_list(S,ClassRef,Objs,Set ++ Acc);
- {pv,{simpledefinedvalue,DefinedObject},Params} ->
- Args = [match_parameters(S,Param,S#state.parameters)||
- Param<-Params],
- #'Object'{def=Def} =
- check_object(S,ObjOrSet,
- #'Object'{classname=ClassRef ,
- def={po,{object,DefinedObject},
- Args}}),
- check_object_list(S,ClassRef,Objs,[{{no_mod,no_name},Def}|Acc]);
- {'ObjectSetFromObjects',Os,FieldName} when is_tuple(Os) ->
- NewSet =
- check_ObjectSetFromObjects(S, element(tuple_size(Os), Os),
- FieldName,[]),
- check_object_list(S,ClassRef,Objs,NewSet++Acc);
- {{'ObjectSetFromObjects',Os,FieldName},InterSection}
- when is_tuple(Os) ->
- NewSet =
- check_ObjectSetFromObjects(S, element(tuple_size(Os), Os),
- FieldName,InterSection),
- check_object_list(S,ClassRef,Objs,NewSet++Acc);
- Other ->
- exit({error,{'unknown object',Other},S})
- end;
-%% Finally reverse the accumulated list and if there are any extension
-%% marks in the object set put one indicator of that in the end of the
-%% list.
-check_object_list(_,_,[],Acc) ->
- lists:reverse(Acc).
check_referenced_object(S,ObjRef)
when is_record(ObjRef,'Externalvaluereference')->
@@ -1213,195 +1002,134 @@ check_referenced_object(S,ObjRef)
check_object(update_state(S,RefedMod),ObjectDef,ObjectDef#typedef.typespec)}
end.
-check_ObjectSetFromObjects(S,ObjName,FieldName,InterSection) ->
- {RefedMod,TDef} = get_referenced_type(S,ObjName),
- ObjOrSet = check_object(update_state(S,RefedMod),TDef,TDef#typedef.typespec),
- InterSec = prepare_intersection(S,InterSection),
- _NewSet = object_set_from_objects(S,RefedMod,FieldName,ObjOrSet,InterSec).
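+%% check_ObjectSetFromObjects(State, ObjName, Fields)
+%%
+%% Illustration with hypothetical names: an ObjectSetFromObjects
+%% construct such as
+%%
+%%   DerivedSet MY-CLASS ::= { SourceSet.&objectField }
+%%
+%% arrives here with ObjName referring to SourceSet and Fields being a
+%% field name list like [{valuefieldreference,objectField}]. A single
+%% object on the left-hand side is wrapped into a one-element object
+%% set before the fields are extracted.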
+check_ObjectSetFromObjects(S, ObjName, Fields) ->
+ {_,Obj0} = get_referenced_type(S, ObjName),
+ case check_object(S, Obj0, Obj0#typedef.typespec) of
+ #'ObjectSet'{}=Obj1 ->
+ get_fieldname_set(S, Obj1, Fields);
+ #'Object'{classname=Class,
+ def={object,_,ObjFs}} ->
+ ObjSet = #'ObjectSet'{class=Class,
+ set=[{'_','_',ObjFs}]},
+ get_fieldname_set(S, ObjSet, Fields)
+ end.
-prepare_intersection(_S,[]) ->
- [];
-prepare_intersection(S,{'EXCEPT',ObjRef}) ->
- except_names(S,ObjRef);
-prepare_intersection(_S,T) ->
- exit({error,{internal_error,not_implemented,object_set_from_objects,T}}).
-except_names(_S,{'SingleValue',#'Externalvaluereference'{value=ObjName}}) ->
- [{except,ObjName}];
-except_names(_,T) ->
- exit({error,{internal_error,not_implemented,object_set_from_objects,T}}).
-
-osfo_intersection(InterSect,ObjList) ->
- Res = [X|| X = {{_,N},_,_} <- ObjList,
- lists:member({except,N},InterSect) == false],
- case lists:member('EXTENSIONMARK',ObjList) of
- true ->
- Res ++ ['EXTENSIONMARK'];
+%% get_type_from_object(State, ObjectOrObjectSet, [{RefType,FieldName}]) ->
+%% Type
+get_type_from_object(S, Object, FieldNames)
+ when is_record(Object, 'Externaltypereference');
+ is_record(Object, 'Externalvaluereference') ->
+ extract_field(S, Object, FieldNames).
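+
+%% Illustration with hypothetical names: references such as
+%%
+%%   MyType ::= myObject.&ArgumentType     -- TypeFromObject
+%%   myValue INTEGER ::= myObject.&code    -- ValueFromObject
+%%
+%% reach these functions with field name lists of the form
+%% [{typefieldreference,'ArgumentType'}] and [{valuefieldreference,code}],
+%% respectively.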
+
+%% get_value_from_object(State, ObjectOrObjectSet, [{RefType,FieldName}]) ->
+%% UntaggedValue
+get_value_from_object(S, Def, FieldNames) ->
+ case extract_field(S, Def, FieldNames) of
+ #valuedef{value=Val} ->
+ Val;
+ {valueset,_}=Val ->
+ Val;
_ ->
- Res
+ asn1_error(S, illegal_value)
end.
-%% get_fieldname_element/3
-%% gets the type/value/object/... of the referenced element in FieldName
-%% FieldName is a list and may have more than one element.
-%% Each element in FieldName can be either {typefieldreference,AnyFieldName}
-%% or {valuefieldreference,AnyFieldName}
-%% Def is the def of the first object referenced by FieldName
-get_fieldname_element(S,Def,[{_RefType,FieldName}]) when is_record(Def,typedef) ->
- {_,_,ObjComps} = (Def#typedef.typespec)#'Object'.def,
- check_fieldname_element(S,lists:keysearch(FieldName,1,ObjComps));
-get_fieldname_element(S,Def,[{_RefType,FieldName}|Rest])
- when is_record(Def,typedef) ->
- %% As FieldName is followd by other FieldNames it has to be an
- %% object or objectset.
- {_,_,ObjComps} = (Def#typedef.typespec)#'Object'.def,
- NewDef = check_fieldname_element(S,lists:keysearch(FieldName,1,ObjComps)),
- ObjDef = fun(#'Object'{def=D}) -> D;
- (#'ObjectSet'{set=Set}) -> Set
- end
- (NewDef),
- case ObjDef of
+%% extract_field(State, ObjectOrObjectSet, [{RefType,FieldName}])
+%% RefType = typefieldreference | valuefieldreference
+%%
+%% Get the type, value, object, object set, or value set from the
+%% referenced object or object set. The list of field name tuples
+%% may have more than one element. All field names but the last
+%% refer to either an object or an object set.
+
+extract_field(S, Def0, FieldNames) ->
+ {_,Def1} = get_referenced_type(S, Def0),
+ Def2 = check_object(S, Def1, Def1#typedef.typespec),
+ Def = Def1#typedef{typespec=Def2},
+ get_fieldname_element(S, Def, FieldNames).
+
+%% get_fieldname_element(State, Element, [{RefType,FieldName}])
+%% RefType = typefieldreference | valuefieldreference
+%%
+%% Get the type, value, object, object set, or value set from the referenced
+%% element. The list of field name tuples may have more than one element.
+%% All field names but the last refer to either an object or an object set.
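+%%
+%% Illustration with hypothetical names: a chained reference such as
+%%
+%%   LinkedType ::= myObject.&linkedObject.&Type
+%%
+%% is resolved by first following the object field linkedObject
+%% ([{valuefieldreference,linkedObject},{typefieldreference,'Type'}])
+%% and then extracting the type field from the object it refers to.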
+
+get_fieldname_element(S, Object0, [{_RefType,FieldName}|Fields]) ->
+ Object = case Object0 of
+ #typedef{typespec=#'Object'{def=Obj}} -> Obj;
+ {_,_,_}=Obj -> Obj
+ end,
+ case check_fieldname_element(S, FieldName, Object) of
+ #'Object'{def=D} when Fields =/= [] ->
+ get_fieldname_element(S, D, Fields);
+ #'ObjectSet'{}=Set ->
+ get_fieldname_set(S, Set, Fields);
+ Result when Fields =:= [] ->
+ Result
+ end;
+get_fieldname_element(_S, Def, []) ->
+ Def.
+
+get_fieldname_set(S, #'ObjectSet'{set=Set0}, T) ->
+ get_fieldname_set_1(S, Set0, T, []).
+
+get_fieldname_set_1(S, ['EXTENSIONMARK'=Ext|T], Fields, Acc) ->
+ get_fieldname_set_1(S, T, Fields, [Ext|Acc]);
+get_fieldname_set_1(S, [H|T], Fields, Acc) ->
+ try get_fieldname_element(S, H, Fields) of
L when is_list(L) ->
- [get_fieldname_element(S,X,Rest) || X <- L];
- _ ->
- get_fieldname_element(S,ObjDef,Rest)
+ get_fieldname_set_1(S, T, Fields, L++Acc);
+ {valueset,L} ->
+ get_fieldname_set_1(S, T, Fields, L++Acc);
+ Other ->
+ get_fieldname_set_1(S, T, Fields, [Other|Acc])
+ catch
+ throw:{error,_} ->
+ get_fieldname_set_1(S, T, Fields, Acc)
end;
-get_fieldname_element(S,{object,_,Fields},[{_RefType,FieldName}|Rest]) ->
- NewDef = check_fieldname_element(S,lists:keysearch(FieldName,1,Fields)),
- get_fieldname_element(S,NewDef,Rest);
-get_fieldname_element(_S,Def,[]) ->
- Def;
-get_fieldname_element(_S,Def,[{_RefType,_FieldName}|_RestFName])
- when is_record(Def,typedef) ->
- ok.
+get_fieldname_set_1(_, [], _Fields, Acc) ->
+ case Acc of
+ [#valuedef{}|_] ->
+ {valueset,Acc};
+ _ ->
+ Acc
+ end.
-check_fieldname_element(S,{value,{_,Def}}) ->
- check_fieldname_element(S,Def);
-check_fieldname_element(S, #typedef{typespec=Ts}=TDef) ->
+check_fieldname_element(S, Name, {_,_,Fields}) ->
+ case lists:keyfind(Name, 1, Fields) of
+ {Name,Def} ->
+ check_fieldname_element_1(S, Def);
+ false ->
+ asn1_error(S, {undefined_field,Name})
+ end.
+
+check_fieldname_element_1(S, #typedef{typespec=Ts}=TDef) ->
case Ts of
#'Object'{} ->
check_object(S, TDef, Ts);
_ ->
check_type(S, TDef, Ts)
end;
-check_fieldname_element(S, #valuedef{}=VDef) ->
+check_fieldname_element_1(S, #valuedef{}=VDef) ->
try
check_value(S, VDef)
catch
- throw:{objectdef} ->
+ throw:{asn1_class, _} ->
#valuedef{checked=C,pos=Pos,name=N,type=Type,
value=Def} = VDef,
ClassName = Type#type.def,
NewSpec = #'Object'{classname=ClassName,def=Def},
NewDef = #typedef{checked=C,pos=Pos,name=N,typespec=NewSpec},
- check_fieldname_element(S, NewDef)
+ check_fieldname_element_1(S, NewDef)
end;
-check_fieldname_element(S,Eref)
- when is_record(Eref,'Externaltypereference');
- is_record(Eref,'Externalvaluereference') ->
- {_,TDef}=get_referenced_type(S,Eref),
- check_fieldname_element(S,TDef);
-check_fieldname_element(S,Other) ->
- throw({error,{assigned_object_error,"not_assigned_object",Other,S}}).
+check_fieldname_element_1(_S, {value_tag,Val}) ->
+ #valuedef{value=Val};
+check_fieldname_element_1(S, Eref)
+ when is_record(Eref, 'Externaltypereference');
+ is_record(Eref, 'Externalvaluereference') ->
+ {_,TDef} = get_referenced_type(S, Eref),
+ check_fieldname_element_1(S, TDef).
-transform_set_to_object_list([{Name,_UVal,Fields}|Objs],Acc) ->
- transform_set_to_object_list(Objs,[{Name,{object,generatesyntax,Fields}}|Acc]);
-transform_set_to_object_list(['EXTENSIONMARK'|Objs],Acc) ->
-%% transform_set_to_object_list(Objs,['EXTENSIONMARK'|Acc]);
- transform_set_to_object_list(Objs,Acc);
-transform_set_to_object_list([],Acc) ->
- Acc.
-
-get_unique_valuelist(_S,ObjSet,{unique,undefined}) -> % no unique field in object
- lists:map(fun({N,{_,_,F}})->{N,no_unique_value,F};
- (V={_,_,_}) ->V;
- ({A,B}) -> {A,no_unique_value,B}
- end, ObjSet);
-get_unique_valuelist(S,ObjSet,{UFN,Opt}) ->
- get_unique_vlist(S,ObjSet,UFN,Opt,[]).
-
-
-get_unique_vlist(_S,[],_,_,[]) ->
- ['EXTENSIONMARK'];
-get_unique_vlist(S,[],_,Opt,Acc) ->
- case catch check_uniqueness(remove_duplicate_objects(Acc)) of
- {asn1_error,_} when Opt =/= 'OPTIONAL' ->
- error({'ObjectSet',"not unique objects in object set",S});
- {asn1_error,_} ->
- lists:reverse(Acc);
- _ ->
- lists:reverse(Acc)
- end;
-get_unique_vlist(S,['EXTENSIONMARK'|Rest],UniqueFieldName,Opt,Acc) ->
- get_unique_vlist(S,Rest,UniqueFieldName,Opt,Acc);
-get_unique_vlist(S,[{ObjName,Obj}|Rest],UniqueFieldName,Opt,Acc) ->
- {_,_,Fields} = Obj,
- NewObjInf =
- case get_unique_value(S,Fields,UniqueFieldName) of
- #valuedef{value=V} -> [{ObjName,V,Fields}];
- [] -> []; % maybe the object only was a reference to an
- % empty object set.
- no_unique_value -> [{ObjName,no_unique_value,Fields}]
- end,
- get_unique_vlist(S,Rest,UniqueFieldName,Opt,NewObjInf++Acc);
-
-get_unique_vlist(S,[V={_,_,_}|Rest],UniqueFieldName,Opt,Acc) ->
- get_unique_vlist(S,Rest,UniqueFieldName,Opt,[V|Acc]).
-
-get_unique_value(S,Fields,UniqueFieldName) ->
- Module = S#state.mname,
- case lists:keysearch(UniqueFieldName,1,Fields) of
- {value,Field} ->
- case element(2,Field) of
- VDef when is_record(VDef,valuedef) ->
- VDef;
- {'ValueFromObject',Object,Name} ->
- case Object of
- {object,Ext} when is_record(Ext,'Externaltypereference') ->
- OtherModule = Ext#'Externaltypereference'.module,
- ExtObjName = Ext#'Externaltypereference'.type,
- ObjDef = asn1_db:dbget(OtherModule,ExtObjName),
- ObjSpec = ObjDef#typedef.typespec,
- get_unique_value(OtherModule,element(3,ObjSpec),Name);
- {object,{_,_,ObjName}} ->
- ObjDef = asn1_db:dbget(Module,ObjName),
- ObjSpec = ObjDef#typedef.typespec,
- get_unique_value(Module,element(3,ObjSpec),Name);
- {po,Object,_Params} ->
- exit({error,{'parameterized object not implemented yet',
- Object},S})
- end;
- Value when is_atom(Value);is_number(Value) ->
- #valuedef{value=Value,module=Module};
- {'CHOICE',{C,Value}} when is_atom(C) ->
- %% #valuedef{value=normalize_value(S,element(3,Field),VDef,[])}
- case Value of
- Scalar when is_atom(Scalar);is_number(Scalar) ->
- #valuedef{value=Value,module=Module};
- Eref = #'Externalvaluereference'{} ->
- element(2,get_referenced_type(S,Eref))
- end
- end;
- false ->
- case Fields of
- [{_,#typedef{typespec=#'ObjectSet'{set=['EXTENSIONMARK']}}}] ->
- [];
- _ ->
- no_unique_value
- end
- end.
-
-check_uniqueness(NameValueList) ->
- check_uniqueness1(lists:keysort(2,NameValueList)).
-
-check_uniqueness1([]) ->
- true;
-check_uniqueness1([_]) ->
- true;
-check_uniqueness1([{_,N,_},{_,N,_}|_Rest]) ->
- throw({asn1_error,{'objects in set must have unique values in UNIQUE fields',N}});
-check_uniqueness1([_|Rest]) ->
- check_uniqueness1(Rest).
-
%% instantiate_po/4
%% ClassDef is the class of Object,
%% Object is the Parameterized object, which is referenced,
@@ -1410,8 +1138,7 @@ check_uniqueness1([_|Rest]) ->
instantiate_po(S=#state{parameters=_OldArgs},_ClassDef,Object,ArgsList) when is_record(Object,pobjectdef) ->
FormalParams = get_pt_args(Object),
MatchedArgs = match_args(S,FormalParams,ArgsList,[]),
-% NewS = S#state{type=Object,parameters=MatchedArgs++OldArgs},
- NewS = S#state{type=Object,parameters=MatchedArgs},
+ NewS = S#state{parameters=MatchedArgs},
check_object(NewS,Object,#'Object'{classname=Object#pobjectdef.class,
def=Object#pobjectdef.def}).
@@ -1421,20 +1148,14 @@ instantiate_po(S=#state{parameters=_OldArgs},_ClassDef,Object,ArgsList) when is_
%% on the right side of the assignment,
%% ArgsList is the list of actual parameters, i.e. real objects
instantiate_pos(S=#state{parameters=_OldArgs},ClassRef,ObjectSetDef,ArgsList) ->
-% ClassName = ClassDef#classdef.name,
FormalParams = get_pt_args(ObjectSetDef),
OSet = case get_pt_spec(ObjectSetDef) of
- {valueset,Set} ->
-% #'ObjectSet'{class=name2Extref(S#state.mname,
-% ClassName),set=Set};
- #'ObjectSet'{class=ClassRef,set=Set};
- Set when is_record(Set,'ObjectSet') -> Set;
- _ ->
- error({type,"parameterized object set failure",S})
+ {valueset,Set} -> #'ObjectSet'{class=ClassRef,set=Set};
+ Set when is_record(Set,'ObjectSet') -> Set;
+ _ -> asn1_error(S, invalid_objectset)
end,
MatchedArgs = match_args(S,FormalParams,ArgsList,[]),
-% NewS = S#state{type=ObjectSetDef,parameters=MatchedArgs++OldArgs},
- NewS = S#state{type=ObjectSetDef,parameters=MatchedArgs},
+ NewS = S#state{parameters=MatchedArgs},
check_object(NewS,ObjectSetDef,OSet).
@@ -1468,7 +1189,7 @@ gen_incl1(S,Fields,[C|CFields]) ->
check_object(S,TDef,TDef#typedef.typespec);
ERef ->
{_,T} = get_referenced_type(S,ERef),
- check_object(S,T,object_to_check(T))
+ check_object(S, T, object_to_check(S, T))
end,
case gen_incl(S,ObjDef#'Object'.def,
ClassFields) of
@@ -1485,7 +1206,7 @@ gen_incl1(S,Fields,[C|CFields]) ->
end.
get_objclass_fields(S,Eref=#'Externaltypereference'{}) ->
- {_,ClassDef} = get_referenced_type(S,Eref),
+ {_,ClassDef} = get_referenced_type(S,Eref, true),
get_objclass_fields(S,ClassDef);
get_objclass_fields(S,CD=#classdef{typespec=#'Externaltypereference'{}}) ->
get_objclass_fields(S,CD#classdef.typespec);
@@ -1501,10 +1222,10 @@ gen_incl_set(S,Fields,#typedef{typespec=#type{def=Eref}})
{_,CDef} = get_referenced_type(S,Eref),
gen_incl_set(S,Fields,CDef);
gen_incl_set(S,Fields,ClassDef) ->
- case catch get_unique_fieldname(S,ClassDef) of
- Tuple when tuple_size(Tuple) =:= 3 ->
+ case get_unique_fieldname(S, ClassDef) of
+ no_unique ->
false;
- _ ->
+ {_, _} ->
gen_incl_set1(S,Fields,
(ClassDef#classdef.typespec)#objectclass.fields)
end.
@@ -1529,475 +1250,390 @@ gen_incl_set1(S,[Object|Rest],CFields)->
gen_incl_set1(S,Rest,CFields)
end.
-check_objectdefn(S,Def,CDef) when is_record(CDef,classdef) ->
- WithSyntax = (CDef#classdef.typespec)#objectclass.syntax,
- ClassFields = (CDef#classdef.typespec)#objectclass.fields,
+
+%%%
+%%% Check an object definition.
+%%%
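+%%% Rough illustration with a hypothetical class:
+%%%
+%%%   OPERATION ::= CLASS {
+%%%       &code          INTEGER UNIQUE,
+%%%       &ArgumentType
+%%%   } WITH SYNTAX { ARGUMENT &ArgumentType CODE &code }
+%%%
+%%% An object written as { &code 1, &ArgumentType INTEGER } is parsed
+%%% as {object,defaultsyntax,Fields}, while { ARGUMENT INTEGER CODE 1 }
+%%% is parsed as {object,definedsyntax,Fields} and must be matched
+%%% against the WITH SYNTAX specification below.
+%%%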
+
+check_objectdefn(S, Def, #classdef{typespec=ObjClass}) ->
+ #objectclass{syntax=Syntax0,fields=ClassFields} = ObjClass,
case Def of
{object,defaultsyntax,Fields} ->
- check_defaultfields(S,Fields,ClassFields);
+ check_defaultfields(S, Fields, ClassFields);
{object,definedsyntax,Fields} ->
- {_,WSSpec} = WithSyntax,
- NewFields =
- case catch( convert_definedsyntax(S,Fields,WSSpec,
- ClassFields,[])) of
- {asn1,{_ErrorType,ObjToken,ClassToken}} ->
- throw({asn1,{'match error in object',ObjToken,
- 'found in object',ClassToken,'found in class'}});
- Err={asn1,_} -> throw(Err);
- Err={'EXIT',_} -> throw(Err);
- DefaultFields when is_list(DefaultFields) ->
- DefaultFields
- end,
- {object,defaultsyntax,NewFields};
- {object,_ObjectId} -> % This is a DefinedObject
- fixa;
- Other ->
- exit({error,{objectdefn,Other}})
+ Syntax = get_syntax(S, Syntax0, ClassFields),
+ case match_syntax(S, Syntax, Fields, []) of
+ {match,NewFields,[]} ->
+ {object,defaultsyntax,NewFields};
+ {match,_,[What|_]} ->
+ syntax_match_error(S, What);
+ {nomatch,[What|_]} ->
+ syntax_match_error(S, What);
+ {nomatch,[]} ->
+ syntax_match_error(S)
+ end
+ end.
+
+
+%%%
+%%% Pre-process the simplified syntax so that it can be more
+%%% easily matched.
+%%%
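+%%%
+%%% For example (hypothetical class), a specification such as
+%%%
+%%%   WITH SYNTAX { ARGUMENT &ArgumentType [RESULT &ResultType] }
+%%%
+%%% is rewritten to a list of the form
+%%%
+%%%   [{token,'ARGUMENT'},{field,...},
+%%%    {optional,[{token,'RESULT'},{field,...}]}]
+%%%
+%%% where each {field,...} holds the corresponding class field tuple.
+%%%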
+
+get_syntax(_, {preprocessed_syntax,Syntax}, _) ->
+ Syntax;
+get_syntax(S, {'WITH SYNTAX',Syntax}, ClassFields) ->
+ preprocess_syntax(S, Syntax, ClassFields).
+
+preprocess_syntax(S, Syntax0, Cs) ->
+ Syntax = preprocess_syntax_1(S, Syntax0, Cs, true),
+ Present0 = preprocess_get_fields(Syntax, []),
+ Present1 = lists:sort(Present0),
+ Present = ordsets:from_list(Present1),
+ case Present =:= Present1 of
+ false ->
+ Dupl = Present1 -- Present,
+ asn1_error(S, {syntax_duplicated_fields,Dupl});
+ true ->
+ ok
+ end,
+ Mandatory0 = get_mandatory_class_fields(Cs),
+ Mandatory = ordsets:from_list(Mandatory0),
+ case ordsets:subtract(Mandatory, Present) of
+ [] ->
+ Syntax;
+ [_|_]=Missing ->
+ asn1_error(S, {syntax_missing_mandatory_fields,Missing})
end.
+preprocess_syntax_1(S, [H|T], Cs, Mandatory) when is_list(H) ->
+ [{optional,preprocess_syntax_1(S, H, Cs, false)}|
+ preprocess_syntax_1(S, T, Cs, Mandatory)];
+preprocess_syntax_1(S, [{valuefieldreference,Name}|T], Cs, Mandatory) ->
+ F = preprocess_check_field(S, Name, Cs, Mandatory),
+ [F|preprocess_syntax_1(S, T, Cs, Mandatory)];
+preprocess_syntax_1(S, [{typefieldreference,Name}|T], Cs, Mandatory) ->
+ F = preprocess_check_field(S, Name, Cs, Mandatory),
+ [F|preprocess_syntax_1(S, T, Cs, Mandatory)];
+preprocess_syntax_1(S, [{Token,_}|T], Cs, Mandatory) when is_atom(Token) ->
+ [{token,Token}|preprocess_syntax_1(S, T, Cs, Mandatory)];
+preprocess_syntax_1(S, [Token|T], Cs, Mandatory) when is_atom(Token) ->
+ [{token,Token}|preprocess_syntax_1(S, T, Cs, Mandatory)];
+preprocess_syntax_1(_, [], _, _) -> [].
+
+preprocess_check_field(S, Name, Cs, Mandatory) ->
+ case lists:keyfind(Name, 2, Cs) of
+ Tuple when is_tuple(Tuple) ->
+ case not Mandatory andalso is_mandatory_class_field(Tuple) of
+ true ->
+ asn1_error(S, {syntax_mandatory_in_optional_group,Name});
+ false ->
+ {field,Tuple}
+ end;
+ false ->
+ asn1_error(S, {syntax_undefined_field,Name})
+ end.
+
+preprocess_get_fields([{field,F}|T], Acc) ->
+ Name = element(2, F),
+ preprocess_get_fields(T, [Name|Acc]);
+preprocess_get_fields([{optional,L}|T], Acc) ->
+ preprocess_get_fields(T, preprocess_get_fields(L, Acc));
+preprocess_get_fields([_|T], Acc) ->
+ preprocess_get_fields(T, Acc);
+preprocess_get_fields([], Acc) ->
+ Acc.
+
+%%%
+%%% Match the actual fields in the object definition to
+%%% the pre-processed simplified syntax.
+%%%
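+%%%
+%%% Continuing the hypothetical example above: matching the object
+%%% definition { ARGUMENT INTEGER RESULT BOOLEAN } against that list
+%%% produces {match,Fields,[]}, where Fields holds one {Name,Setting}
+%%% pair per matched class field (the object in defaultsyntax form);
+%%% words or settings that cannot be matched result in a syntax_nomatch
+%%% error.
+%%%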
+
+match_syntax(S, [{token,Token}|T], [A|As]=Args, Acc) ->
+ case A of
+ {word_or_setting,_,#'Externaltypereference'{type=Token}} ->
+ match_syntax(S, T, As, Acc);
+ {Token,Line} when is_integer(Line) ->
+ match_syntax(S, T, As, Acc);
+ _ ->
+ {nomatch,Args}
+ end;
+match_syntax(S, [{field,Field}|T]=Fs, [A|As0]=Args0, Acc) ->
+ try match_syntax_type(S, Field, A) of
+ {match,Match} ->
+ match_syntax(S, T, As0, lists:reverse(Match)++Acc);
+ {params,_Name,#ptypedef{args=Params}=P,Ref} ->
+ {Args,As} = lists:split(length(Params), As0),
+ Val = match_syntax_params(S, P, Ref, Args),
+ match_syntax(S, Fs, [Val|As], Acc)
+ catch
+ _:_ ->
+ {nomatch,Args0}
+ end;
+match_syntax(S, [{optional,L}|T], As0, Acc) ->
+ case match_syntax(S, L, As0, []) of
+ {match,Match,As} ->
+ match_syntax(S, T, As, lists:reverse(Match)++Acc);
+ {nomatch,As0} ->
+ match_syntax(S, T, As0, Acc);
+ {nomatch,_}=NoMatch ->
+ NoMatch
+ end;
+match_syntax(_, [_|_], [], _Acc) ->
+ {nomatch,[]};
+match_syntax(_, [], As, Acc) ->
+ {match,Acc,As}.
+
+match_syntax_type(S, Type, {value_tag,Val}) ->
+ match_syntax_type(S, Type, Val);
+match_syntax_type(S, Type, {setting,_,Val}) ->
+ match_syntax_type(S, Type, Val);
+match_syntax_type(S, Type, {word_or_setting,_,Val}) ->
+ match_syntax_type(S, Type, Val);
+match_syntax_type(_S, _Type, {Atom,Line})
+ when is_atom(Atom), is_integer(Line) ->
+ throw(nomatch);
+match_syntax_type(S, {fixedtypevaluefield,Name,#type{}=T,_,_}=Type,
+ #'Externalvaluereference'{}=ValRef0) ->
+ try get_referenced_type(S, ValRef0) of
+ {M,#valuedef{}=ValDef} ->
+ match_syntax_type(update_state(S, M), Type, ValDef)
+ catch
+ throw:{error,_} ->
+ ValRef = #valuedef{name=Name,
+ type=T,
+ value=ValRef0,
+ module=S#state.mname},
+ match_syntax_type(S, Type, ValRef)
+ end;
+match_syntax_type(S, {fixedtypevaluefield,Name,#type{},_,_}, #valuedef{}=Val0) ->
+ Val = check_value(S, Val0),
+ {match,[{Name,Val}]};
+match_syntax_type(S, {fixedtypevaluefield,Name,#type{},_,_},
+ {'ValueFromObject',{object,Object},FieldNames}) ->
+ Val = extract_field(S, Object, FieldNames),
+ {match,[{Name,Val}]};
+match_syntax_type(S, {fixedtypevaluefield,Name,#type{}=T,_,_}=Type, Any) ->
+ ValDef = #valuedef{name=Name,type=T,value=Any,module=S#state.mname},
+ match_syntax_type(S, Type, ValDef);
+match_syntax_type(_S, {fixedtypevaluesetfield,Name,#type{},_}, Any) ->
+ {match,[{Name,Any}]};
+match_syntax_type(S, {objectfield,Name,_,_,_}, #'Externalvaluereference'{}=Ref) ->
+ {M,Obj} = get_referenced_type(S, Ref),
+ check_object(S, Obj, object_to_check(S, Obj)),
+ {match,[{Name,Ref#'Externalvaluereference'{module=M}}]};
+match_syntax_type(S, {objectfield,Name,Class,_,_}, {object,_,_}=ObjDef) ->
+ InlinedObjName = list_to_atom(lists:concat([S#state.tname,
+ '_',Name])),
+ ObjSpec = #'Object'{classname=Class,def=ObjDef},
+ CheckedObj = check_object(S, #typedef{typespec=ObjSpec}, ObjSpec),
+ InlObj = #typedef{checked=true,name=InlinedObjName,typespec=CheckedObj},
+ ObjKey = {InlinedObjName, InlinedObjName},
+ insert_once(S, inlined_objects, ObjKey),
+ %% Which module to use here? Could it be other than top_module?
+ asn1_db:dbput(get(top_module), InlinedObjName, InlObj),
+ {match,[{Name,InlObj}]};
+match_syntax_type(_S, {objectfield,Name,_,_,_}, Any) ->
+ {match,[{Name,Any}]};
+match_syntax_type(S, {objectsetfield,Name,CDef0,_}, Any) ->
+ CDef = case CDef0 of
+ #type{def=CDef1} -> CDef1;
+ CDef1 -> CDef1
+ end,
+ case match_syntax_objset(S, Any, CDef) of
+ #typedef{typespec=#'ObjectSet'{}=Ts0}=Def ->
+ Ts = check_object(S, Def, Ts0),
+ {match,[{Name,Def#typedef{checked=true,typespec=Ts}}]};
+ _ ->
+ syntax_match_error(S, Any)
+ end;
+match_syntax_type(S, {typefield,Name0,_}, #type{def={pt,_,_}=Def}=Actual) ->
+    %% This is an inlined type. If it is a constructed type, save it in the database.
+ T = check_type(S, #typedef{typespec=Actual}, Actual),
+ #'Externaltypereference'{type=PtName} = element(2, Def),
+ NameList = [PtName,S#state.tname],
+ Name = list_to_atom(asn1ct_gen:list2name(NameList)),
+ NewTDef = #typedef{checked=true,name=Name,typespec=T},
+ asn1_db:dbput(S#state.mname, Name, NewTDef),
+ insert_once(S, parameterized_objects, {Name,type,NewTDef}),
+ {match,[{Name0,NewTDef}]};
+match_syntax_type(S, {typefield,Name,_}, #type{def=#'ObjectClassFieldType'{}}=Actual) ->
+ T = check_type(S, #typedef{typespec=Actual}, Actual),
+ {match,[{Name,ocft_def(T)}]};
+match_syntax_type(S, {typefield,Name,_}, #type{def=#'Externaltypereference'{}=Ref}) ->
+ match_syntax_external(S, Name, Ref);
+match_syntax_type(S, {typefield,Name,_}, #type{def=Def}=Actual) ->
+ T = check_type(S, #typedef{typespec=Actual}, Actual),
+ TypeName = asn1ct_gen:type(asn1ct_gen:get_inner(Def)),
+ {match,[{Name,#typedef{checked=true,name=TypeName,typespec=T}}]};
+match_syntax_type(S, {typefield,Name,_}, #'Externaltypereference'{}=Ref) ->
+ match_syntax_external(S, Name, Ref);
+match_syntax_type(_S, {variabletypevaluefield,Name,_,_}, Any) ->
+ {match,[{Name,Any}]};
+match_syntax_type(_S, {variabletypevaluesetfield,Name,_,_}, Any) ->
+ {match,[{Name,Any}]};
+match_syntax_type(_S, _Type, _Actual) ->
+ throw(nomatch).
+
+match_syntax_params(S0, #ptypedef{name=Name}=PtDef,
+ #'Externaltypereference'{module=M,type=N}=ERef0, Args) ->
+ S = S0#state{mname=M,module=load_asn1_module(S0, M),tname=Name},
+ Type = check_type(S, PtDef, #type{def={pt,ERef0,Args}}),
+ ERefName = new_reference_name(N),
+ ERef = #'Externaltypereference'{type=ERefName,module=S0#state.mname},
+ TDef = #typedef{checked=true,name=ERefName,typespec=Type},
+ insert_once(S0, parameterized_objects, {ERefName,type,TDef}),
+ asn1_db:dbput(S0#state.mname, ERef#'Externaltypereference'.type, TDef),
+ ERef.
+
+match_syntax_external(#state{mname=Mname}=S0, Name, Ref0) ->
+ {M,T0} = get_referenced_type(S0, Ref0),
+ Ref1 = Ref0#'Externaltypereference'{module=M},
+ case T0 of
+ #ptypedef{} ->
+ {params,Name,T0,Ref1};
+ #typedef{checked=false}=TDef0 when Mname =/= M ->
+ %% This typedef is an imported type (or maybe a set.asn
+ %% compilation).
+ S = S0#state{mname=M,module=load_asn1_module(S0, M),
+ tname=get_datastr_name(TDef0)},
+ Type = check_type(S, TDef0, TDef0#typedef.typespec),
+ TDef = TDef0#typedef{checked=true,typespec=Type},
+ asn1_db:dbput(M, get_datastr_name(TDef), TDef),
+ {match,[{Name,merged_name(S, Ref1)}]};
+ TDef ->
+ %% This might be a renamed type in a set of specs,
+ %% so rename the ref.
+ Type = asn1ct:get_name_of_def(TDef),
+ Ref = Ref1#'Externaltypereference'{type=Type},
+ {match,[{Name,Ref}]}
+ end.
+
+match_syntax_objset(_S, {element_set,_,_}=Set, ClassDef) ->
+ make_objset(ClassDef, Set);
+match_syntax_objset(S, #'Externaltypereference'{}=Ref, _) ->
+ {_,T} = get_referenced_type(S, Ref),
+ T;
+match_syntax_objset(S, #'Externalvaluereference'{}=Ref, _) ->
+ {_,T} = get_referenced_type(S, Ref),
+ T;
+match_syntax_objset(_, [_|_]=Set, ClassDef) ->
+ make_objset(ClassDef, Set);
+match_syntax_objset(S, {object,definedsyntax,Words}, ClassDef) ->
+ case Words of
+ [Word] ->
+ match_syntax_objset_1(S, Word, ClassDef);
+ [_|_] ->
+ %% More than one word does not make sense.
+ none
+ end;
+match_syntax_objset(S, #type{def=#'Externaltypereference'{}=Set}, ClassDef) ->
+ match_syntax_objset(S, Set, ClassDef);
+match_syntax_objset(_, #type{}, _) ->
+ none.
+
+match_syntax_objset_1(S, {setting,_,Set}, ClassDef) ->
+ %% Word that starts with an uppercase letter.
+ match_syntax_objset(S, Set, ClassDef);
+match_syntax_objset_1(S, {word_or_setting,_,Set}, ClassDef) ->
+ %% Word in uppercase/hyphens only.
+ match_syntax_objset(S, Set, ClassDef);
+match_syntax_objset_1(S, #type{def={'TypeFromObject', {object,Object}, FNs}},
+ ClassDef) ->
+ Set = extract_field(S, Object, FNs),
+ [_|_] = Set,
+ #typedef{checked=true,typespec=#'ObjectSet'{class=ClassDef,set=Set}};
+match_syntax_objset_1(_, #type{def=#'ObjectClassFieldType'{}}=Set, ClassDef) ->
+ make_objset(ClassDef, Set);
+match_syntax_objset_1(_, {object,_,_}=Object, ClassDef) ->
+ make_objset(ClassDef, [Object]).
+
+make_objset(ClassDef, Set) ->
+ #typedef{typespec=#'ObjectSet'{class=ClassDef,set=Set}}.
+
+syntax_match_error(S) ->
+ asn1_error(S, syntax_nomatch).
+
+syntax_match_error(S, What0) ->
+ What = printable_string(What0),
+ asn1_error(S, {syntax_nomatch,What}).
+
+printable_string(Def) ->
+ printable_string_1(Def).
+
+printable_string_1({word_or_setting,_,Def}) ->
+ printable_string_1(Def);
+printable_string_1({value_tag,V}) ->
+ printable_string_1(V);
+printable_string_1({#seqtag{val=Val1},Val2}) ->
+ atom_to_list(Val1) ++ " " ++ printable_string_1(Val2);
+printable_string_1(#type{def=Def}) ->
+ atom_to_list(asn1ct_gen:get_inner(Def));
+printable_string_1(#'Externaltypereference'{type=Type}) ->
+ atom_to_list(Type);
+printable_string_1(#'Externalvaluereference'{value=Type}) ->
+ atom_to_list(Type);
+printable_string_1({Atom,Line}) when is_atom(Atom), is_integer(Line) ->
+ q(Atom);
+printable_string_1({object,definedsyntax,L}) ->
+ q(string:join([printable_string_1(Item) || Item <- L], " "));
+printable_string_1([_|_]=Def) ->
+ case lists:all(fun is_integer/1, Def) of
+ true ->
+ lists:flatten(io_lib:format("~p", [Def]));
+ false ->
+ q(string:join([printable_string_1(Item) || Item <- Def], " "))
+ end;
+printable_string_1(Def) ->
+ lists:flatten(io_lib:format("~p", [Def])).
+
+q(S) ->
+ lists:concat(["\"",S,"\""]).
+
check_defaultfields(S, Fields, ClassFields) ->
Present = ordsets:from_list([F || {F,_} <- Fields]),
Mandatory0 = get_mandatory_class_fields(ClassFields),
Mandatory = ordsets:from_list(Mandatory0),
All = ordsets:from_list([element(2, F) || F <- ClassFields]),
- #state{type=T,tname=Obj} = S,
+ #state{tname=Obj} = S,
case ordsets:subtract(Present, All) of
[] ->
ok;
[_|_]=Invalid ->
- asn1_error(S, T, {invalid_fields,Invalid,Obj})
+ asn1_error(S, {invalid_fields,Invalid,Obj})
end,
case ordsets:subtract(Mandatory, Present) of
[] ->
check_defaultfields_1(S, Fields, ClassFields, []);
[_|_]=Missing ->
- asn1_error(S, T, {missing_mandatory_fields,Missing,Obj})
+ asn1_error(S, {missing_mandatory_fields,Missing,Obj})
end.
check_defaultfields_1(_S, [], _ClassFields, Acc) ->
{object,defaultsyntax,lists:reverse(Acc)};
check_defaultfields_1(S, [{FName,Spec}|Fields], ClassFields, Acc) ->
CField = lists:keyfind(FName, 2, ClassFields),
- {NewField,RestFields} =
- convert_to_defaultfield(S, FName, [Spec|Fields], CField),
- check_defaultfields_1(S, RestFields, ClassFields, [NewField|Acc]).
+ {match,Match} = match_syntax_type(S, CField, Spec),
+ check_defaultfields_1(S, Fields, ClassFields, Match++Acc).
-convert_definedsyntax(_S,[],[],_ClassFields,Acc) ->
- lists:reverse(Acc);
-convert_definedsyntax(S,Fields,WithSyntax,ClassFields,Acc) ->
- {MatchedField,RestFields,RestWS} =
- match_field(S,Fields,WithSyntax,ClassFields),
- if
- is_list(MatchedField) ->
- convert_definedsyntax(S,RestFields,RestWS,ClassFields,
- lists:append(MatchedField,Acc));
- true ->
- convert_definedsyntax(S,RestFields,RestWS,ClassFields,
- [MatchedField|Acc])
- end.
+get_mandatory_class_fields(ClassFields) ->
+ [element(2, F) || F <- ClassFields,
+ is_mandatory_class_field(F)].
-get_mandatory_class_fields([{fixedtypevaluefield,Name,_,_,'MANDATORY'}|T]) ->
- [Name|get_mandatory_class_fields(T)];
-get_mandatory_class_fields([{objectfield,Name,_,_,'MANDATORY'}|T]) ->
- [Name|get_mandatory_class_fields(T)];
-get_mandatory_class_fields([{objectsetfield,Name,_,'MANDATORY'}|T]) ->
- [Name|get_mandatory_class_fields(T)];
-get_mandatory_class_fields([{typefield,Name,'MANDATORY'}|T]) ->
- [Name|get_mandatory_class_fields(T)];
-get_mandatory_class_fields([{variabletypevaluefield,Name,_,'MANDATORY'}|T]) ->
- [Name|get_mandatory_class_fields(T)];
-get_mandatory_class_fields([{variabletypevaluesetfield,
- Name,_,'MANDATORY'}|T]) ->
- [Name|get_mandatory_class_fields(T)];
-get_mandatory_class_fields([_|T]) ->
- get_mandatory_class_fields(T);
-get_mandatory_class_fields([]) -> [].
-
-match_field(S,Fields,WithSyntax,ClassFields) ->
- match_field(S,Fields,WithSyntax,ClassFields,[]).
-
-match_field(S,Fields,[W|Ws],ClassFields,Acc) when is_list(W) ->
- case catch(match_optional_field(S,Fields,W,ClassFields,[])) of
- {'EXIT',_} ->
- match_field(Fields,Ws,ClassFields,Acc); %% add S
-%% {[Result],RestFields} ->
-%% {Result,RestFields,Ws};
- {Result,RestFields} when is_list(Result) ->
- {Result,RestFields,Ws};
- _ ->
- match_field(S,Fields,Ws,ClassFields,Acc)
- end;
-match_field(S,Fields,WithSyntax,ClassFields,_Acc) ->
- match_mandatory_field(S,Fields,WithSyntax,ClassFields,[]).
-
-match_optional_field(_S,RestFields,[],_,Ret) ->
- {Ret,RestFields};
-%% An additional optional field within an optional field
-match_optional_field(S,Fields,[W|Ws],ClassFields,Ret) when is_list(W) ->
- case catch match_optional_field(S,Fields,W,ClassFields,[]) of
- {'EXIT',_} when length(Ws) > 0 ->
- match_optional_field(S,Fields,Ws,ClassFields,Ret);
- {'EXIT',_} ->
- {Ret,Fields};
- {asn1,{optional_matcherror,_,_}} when length(Ws) > 0 ->
- match_optional_field(S,Fields,Ws,ClassFields,Ret);
- {asn1,{optional_matcherror,_,_}} ->
- {Ret,Fields};
- {OptionalField,RestFields} ->
- match_optional_field(S,RestFields,Ws,ClassFields,
- lists:append(OptionalField,Ret))
- end;
-%% identify and skip word
-match_optional_field(S,[{_,_,#'Externaltypereference'{type=WorS}}|Rest],
- [WorS|Ws],ClassFields,Ret) ->
- match_optional_field(S,Rest,Ws,ClassFields,Ret);
-match_optional_field(S,[],_,ClassFields,Ret) ->
- match_optional_field(S,[],[],ClassFields,Ret);
-%% identify and skip comma
-match_optional_field(S,[{WorS,_}|Rest],[{WorS,_}|Ws],ClassFields,Ret) ->
- match_optional_field(S,Rest,Ws,ClassFields,Ret);
-%% am optional setting inside another optional setting may be "double-listed"
-match_optional_field(S,[Setting],DefinedSyntax,ClassFields,Ret)
- when is_list(Setting) ->
- match_optional_field(S,Setting,DefinedSyntax,ClassFields,Ret);
-%% identify and save field data
-match_optional_field(S,[Setting|Rest],[{_,W}|Ws],ClassFields,Ret) ->
- ?dbg("matching optional field setting: ~p with user friendly syntax: ~p~n",[Setting,W]),
- WorS =
- case Setting of
- Type when is_record(Type,type) -> Type;
- {'ValueFromObject',_,_} -> Setting;
- {object,_,_} -> Setting;
- {_,_,WordOrSetting} -> WordOrSetting;
- Other -> Other
- end,
- case lists:keysearch(W,2,ClassFields) of
- false ->
- throw({asn1,{optional_matcherror,WorS,W}});
- {value,CField} ->
- {NewField,RestFields} =
- convert_to_defaultfield(S,W,[WorS|Rest],CField),
- match_optional_field(S,RestFields,Ws,ClassFields,[NewField|Ret])
- end;
-match_optional_field(_S,[WorS|_Rest],[W|_Ws],_ClassFields,_Ret) ->
- throw({asn1,{optional_matcherror,WorS,W}}).
-
-match_mandatory_field(_S,[],[],_,[Acc]) ->
- {Acc,[],[]};
-match_mandatory_field(_S,[],[],_,Acc) ->
- {Acc,[],[]};
-match_mandatory_field(S,[],[H|T],CF,Acc) when is_list(H) ->
- match_mandatory_field(S,[],T,CF,Acc);
-match_mandatory_field(_S,[],WithSyntax,_,_Acc) ->
- throw({asn1,{mandatory_matcherror,[],WithSyntax}});
-%match_mandatory_field(_S,Fields,WithSyntax=[W|_Ws],_ClassFields,[Acc]) when is_list(W) ->
-match_mandatory_field(_S,Fields,WithSyntax=[W|_Ws],_ClassFields,Acc) when is_list(W), length(Acc) >= 1 ->
- {Acc,Fields,WithSyntax};
-%% identify and skip word
-%%match_mandatory_field(S,[{_,_,WorS}|Rest],
-match_mandatory_field(S,[{_,_,#'Externaltypereference'{type=WorS}}|Rest],
- [WorS|Ws],ClassFields,Acc) ->
- match_mandatory_field(S,Rest,Ws,ClassFields,Acc);
-%% identify and skip comma
-match_mandatory_field(S,[{WorS,_}|Rest],[{WorS,_}|Ws],ClassFields,Ret) ->
- match_mandatory_field(S,Rest,Ws,ClassFields,Ret);
-%% identify and save field data
-match_mandatory_field(S,[Setting|Rest],[{_,W}|Ws],ClassFields,Acc) ->
- ?dbg("matching field setting: ~p with user friendly syntax: ~p~n",[Setting,W]),
- WorS =
- case Setting of
- {object,_,_} -> Setting;
- {_,_,WordOrSetting} -> WordOrSetting;
- Type when is_record(Type,type) -> Type;
- Other -> Other
- end,
- case lists:keysearch(W,2,ClassFields) of
- false ->
- throw({asn1,{mandatory_matcherror,WorS,W}});
- {value,CField} ->
- {NewField,RestFields} =
- convert_to_defaultfield(S,W,[WorS|Rest],CField),
- match_mandatory_field(S,RestFields,Ws,ClassFields,[NewField|Acc])
- end;
-
-match_mandatory_field(_S,[WorS|_Rest],[W|_Ws],_ClassFields,_Acc) ->
- throw({asn1,{mandatory_matcherror,WorS,W}}).
-
-%% Converts a field of an object from defined syntax to default syntax
-%% A field may be a type, a fixed type value, an object, an objectset,
-%%
-convert_to_defaultfield(S,ObjFieldName,[OFS|RestOFS],CField)->
- ?dbg("convert field: ~p of type: ~p~n",[ObjFieldName,element(1,CField)]),
- CurrMod = S#state.mname,
- Strip_value_tag =
- fun({value_tag,ValueSetting}) -> ValueSetting;
- (VS) -> VS
- end,
- ObjFieldSetting = Strip_value_tag(OFS),
- RestSettings = [Strip_value_tag(X)||X <- RestOFS],
- case element(1,CField) of
- typefield ->
- TypeDef=
- case ObjFieldSetting of
- TypeRec when is_record(TypeRec,type) -> TypeRec#type.def;
- TDef when is_record(TDef,typedef) ->
- TDef#typedef{checked=true,
- typespec=check_type(S,TDef,
- TDef#typedef.typespec)};
- _ -> ObjFieldSetting
- end,
- {Type,SettingsLeft} =
- if
- is_record(TypeDef,typedef) -> {TypeDef,RestSettings};
- is_record(TypeDef,'ObjectClassFieldType') ->
- T=check_type(S,#typedef{typespec=ObjFieldSetting},ObjFieldSetting),
- {oCFT_def(S,T),RestSettings};
-% #typedef{checked=true,name=Name,typespec=IT};
- is_tuple(TypeDef), element(1,TypeDef) == pt ->
- %% this is an inlined type. If constructed
- %% type save in data base
- T=check_type(S,#typedef{typespec=ObjFieldSetting},ObjFieldSetting),
- #'Externaltypereference'{type=PtName} =
- element(2,TypeDef),
- NameList = [PtName,S#state.tname],
- NewName = list_to_atom(asn1ct_gen:list2name(NameList)),
- NewTDef=#typedef{checked=true,name=NewName,
- typespec=T},
- asn1_db:dbput(S#state.mname,NewName,NewTDef),
- %%asn1ct_gen:insert_once(parameterized_objects,{NewName,type,NewTDef}),
- insert_once(S,parameterized_objects,
- {NewName,type,NewTDef}),
- {NewTDef,RestSettings};
- is_tuple(TypeDef), element(1,TypeDef)=='SelectionType' ->
- T=check_type(S,#typedef{typespec=ObjFieldSetting},
- ObjFieldSetting),
- Name = type_name(S,T),
- {#typedef{checked=true,name=Name,typespec=T},RestSettings};
- true ->
- case asn1ct_gen:type(asn1ct_gen:get_inner(TypeDef)) of
- ERef = #'Externaltypereference'{module=CurrMod} ->
- {RefMod,T} = get_referenced_type(S,ERef),
- check_and_save(S,ERef#'Externaltypereference'{module=RefMod},T,RestSettings);
-
- ERef = #'Externaltypereference'{} ->
- {RefMod,T} = get_referenced_type(S,ERef),
- check_and_save(S,ERef#'Externaltypereference'{module=RefMod},T,RestSettings);
- Bif when Bif=={primitive,bif};Bif=={constructed,bif} ->
- T = check_type(S,#typedef{typespec=ObjFieldSetting},
- ObjFieldSetting),
- {#typedef{checked=true,name=Bif,typespec=T},RestSettings};
- _ ->
- %this case should not happen any more
- {Mod,T} =
- get_referenced_type(S,#'Externaltypereference'{module=S#state.mname,type=ObjFieldSetting}),
- case Mod of
- CurrMod ->
- {T,RestSettings};
- ExtMod ->
- #typedef{name=Name} = T,
- {T#typedef{name={ExtMod,Name}},RestSettings}
- end
- end
- end,
- {{ObjFieldName,Type},SettingsLeft};
- fixedtypevaluefield ->
- case ObjFieldName of
- Val when is_atom(Val) ->
- %% ObjFieldSetting can be a value,an objectidentifiervalue,
- %% an element in an enumeration or namednumberlist etc.
- ValRef =
- case ObjFieldSetting of
- ValSetting=#'Externalvaluereference'{} ->
- ValSetting;
- {'ValueFromObject',{_,ObjRef},FieldName} ->
- {_,Object} = get_referenced_type(S,ObjRef),
- ChObject = check_object(S,Object,
- Object#typedef.typespec),
- get_fieldname_element(S,Object#typedef{typespec=ChObject},
- FieldName);
- ValSetting = #valuedef{} ->
- ValSetting;
- ValSetting ->
- #valuedef{type=element(3,CField),
- value=ValSetting,
- module=S#state.mname}
- end,
- ?dbg("fixedtypevaluefield ValRef: ~p~n",[ValRef]),
- case ValRef of
- #valuedef{} ->
- {{ObjFieldName,check_value(S,ValRef)},RestSettings};
- _ ->
- ValDef =
- case catch get_referenced_type(S,ValRef) of
- {error,_} ->
- NewValDef =
- #valuedef{name=Val,
- type=element(3,CField),
- value=ObjFieldSetting,
- module=S#state.mname},
- check_value(S,NewValDef);
- {M,VDef} when is_record(VDef,valuedef) ->
- check_value(update_state(S,M),
- %%S#state{mname=M},
- VDef);%% XXX
- {M,VDef} ->
- check_value(update_state(S,M),
- %%S#state{mname=M},
- #valuedef{name=Val,
- type=element(3,CField),
- value=VDef,
- module=M})
- end,
- {{ObjFieldName,ValDef},RestSettings}
- end;
- Val ->
- {{ObjFieldName,Val},RestSettings}
- end;
- fixedtypevaluesetfield ->
- {{ObjFieldName,ObjFieldSetting},RestSettings};
- objectfield ->
- CheckObject =
- fun(O) ->
- O#typedef{checked=true,typespec=
- check_object(S,O,O#typedef.typespec)}
- end,
- ObjectSpec =
- case ObjFieldSetting of
- Ref when is_record(Ref,'Externalvaluereference') ->
- %% The object O might be a #valuedef{} if
- %% e.g. the definition looks like
- %% myobj SOMECLASS ::= referencedObject
- {M,O} = get_referenced_type(S,Ref),
- check_object(S,O,object_to_check(O)),
- Ref#'Externalvaluereference'{module=M};
-
- {'ValueFromObject',{_,ObjRef},FieldName} ->
- %% This is an ObjectFromObject
- {_,Object} = get_referenced_type(S,ObjRef),
- ChObject = check_object(S,Object,
- Object#typedef.typespec),
- ObjFromObj=
- get_fieldname_element(S,Object#typedef{
- typespec=ChObject},
- FieldName),
- CheckObject(ObjFromObj);
- ObjDef={object,_,_} ->
- %% An object defined inlined in another object
- %% class is an objectfield, that implies that
- %% {objectsetfield,TypeFieldName,DefinedObjecClass,
- %% OptionalitySpec}
- %% DefinedObjecClass = #'Externaltypereference'{}|
- %% 'TYPE-IDENTIFIER' | 'ABSTRACT-SYNTAX'
- ClassName = element(3,CField),
- InlinedObjName=
- list_to_atom(lists:concat([S#state.tname]++
- ['_',ObjFieldName])),
-
- ObjSpec = #'Object'{classname=ClassName,
- def=ObjDef},
- CheckedObj=
- check_object(S,#typedef{typespec=ObjSpec},ObjSpec),
- InlObj = #typedef{checked=true,name=InlinedObjName,
- typespec=CheckedObj},
- ObjKey = {InlinedObjName,InlinedObjName},
- %% asn1ct_gen:insert_once(inlined_objects,ObjKey),
- insert_once(S,inlined_objects,ObjKey),
- %% Which module to use here? Could it be other than top_module ?
- %% asn1_db:dbput(S#state.mname,InlinedObjName,InlObj),
- asn1_db:dbput(get(top_module),InlinedObjName,InlObj),
- InlObj;
- #type{def=Eref} when is_record(Eref,'Externaltypereference') ->
- {_,O} = get_referenced_type(S,Eref),
- CheckObject(O);
- Other ->
- {_,O} = get_referenced_type(S,#'Externaltypereference'{module=S#state.mname,type=Other}),
- CheckObject(O)
- end,
- {{ObjFieldName,ObjectSpec},RestSettings};
- variabletypevaluefield ->
- {{ObjFieldName,ObjFieldSetting},RestSettings};
- variabletypevaluesetfield ->
- {{ObjFieldName,ObjFieldSetting},RestSettings};
-%% objectset_or_fixedtypevalueset_field ->
-%% ok;
- objectsetfield ->
- ObjSetSpec = get_objectset_def(S,ObjFieldSetting,CField),
- ?dbg("objectsetfield, ObjSetSpec:~p~n",[ObjSetSpec]),
- {{ObjFieldName,
- ObjSetSpec#typedef{checked=true,
- typespec=check_object(S,ObjSetSpec,
- ObjSetSpec#typedef.typespec)}},RestSettings}
- end.
-
-get_objectset_def(S,Ref,CField)
- when is_record(Ref,'Externaltypereference');
- is_record(Ref,'Externalvaluereference') ->
- {_M,T}=get_referenced_type(S,Ref),
- get_objectset_def2(S,T,CField);
-get_objectset_def(S,ObjectList,CField) when is_list(ObjectList) ->
- %% an objctset defined in the object,though maybe
- %% parsed as a SequenceOfValue
- %% The ObjectList may be a list of references to
- %% objects, a ValueFromObject
- ?dbg("objectsetfield: ~p~n",[CField]),
- get_objectset_def2(S,ObjectList,CField);
-get_objectset_def(S,'EXTENSIONMARK',CField) ->
- ?dbg("objectsetfield: ~p~n",[CField]),
- get_objectset_def2(S,['EXTENSIONMARK'],CField);
-get_objectset_def(_S,ObjFieldSetting={'SingleValue',_},CField) ->
- %% a Union of defined objects
- ?dbg("objectsetfield, SingleValue~n",[]),
- union_of_defed_objs(CField,ObjFieldSetting);
-get_objectset_def(_S,ObjFieldSetting={{'SingleValue',_},_},CField) ->
- %% a Union of defined objects
- ?dbg("objectsetfield, SingleValue~n",[]),
- union_of_defed_objs(CField,ObjFieldSetting);
-get_objectset_def(S,{object,_,[#type{def={'TypeFromObject',
- {object,RefedObj},
- FieldName}}]},_CField) ->
- %% This case occurs when an ObjectSetFromObjects
- %% production is used
- {_M,Def} = get_referenced_type(S,RefedObj),
- get_fieldname_element(S,Def,FieldName);
-get_objectset_def(S,{object,_,[{setting,_,ERef}]},CField)
- when is_record(ERef,'Externaltypereference') ->
- {_,T} = get_referenced_type(S,ERef),
- get_objectset_def2(S,T,CField);
-get_objectset_def(S,#type{def=ERef},_CField)
- when is_record(ERef,'Externaltypereference') ->
- {_,T} = get_referenced_type(S,ERef),
- T;
-get_objectset_def(S,ObjFieldSetting,CField)
- when is_atom(ObjFieldSetting) ->
- ERef = #'Externaltypereference'{module=S#state.mname,
- type=ObjFieldSetting},
- {_,T} = get_referenced_type(S,ERef),
- get_objectset_def2(S,T,CField).
-
-get_objectset_def2(_S,T = #typedef{typespec=#'Object'{}},_CField) ->
- #typedef{typespec=#'Object'{classname=Class,def=Def}} = T,
- T#typedef{typespec=#'ObjectSet'{class=Class,set=[Def]}};
-get_objectset_def2(_S,Set,CField) when is_list(Set) ->
- {_,_,Type,_} = CField,
- ClassDef = Type#type.def,
- #typedef{typespec=#'ObjectSet'{class=ClassDef,
- set=Set}};
-get_objectset_def2(_S,T = #typedef{typespec=#'ObjectSet'{}},_CField) ->
- T;
-get_objectset_def2(S,T,_CField) ->
- asn1ct:warning("get_objectset_def2: uncontrolled object set structure:~n~p~n",
- [T],S,"get_objectset_def2: uncontrolled object set structure").
-
-type_name(S,#type{def=Def}) ->
- CurrMod = S#state.mname,
- case asn1ct_gen:type(asn1ct_gen:get_inner(Def)) of
- #'Externaltypereference'{module=CurrMod,type=Name} ->
- Name;
- #'Externaltypereference'{module=Mod,type=Name} ->
- {Mod,Name};
- Bif when Bif=={primitive,bif};Bif=={constructed,bif} ->
- Bif
- end.
+is_mandatory_class_field({fixedtypevaluefield,_,_,_,'MANDATORY'}) ->
+ true;
+is_mandatory_class_field({objectfield,_,_,_,'MANDATORY'}) ->
+ true;
+is_mandatory_class_field({objectsetfield,_,_,'MANDATORY'}) ->
+ true;
+is_mandatory_class_field({typefield,_,'MANDATORY'}) ->
+ true;
+is_mandatory_class_field({variabletypevaluefield,_,_,'MANDATORY'}) ->
+ true;
+is_mandatory_class_field({variabletypevaluesetfield,_,_,'MANDATORY'}) ->
+ true;
+is_mandatory_class_field(_) ->
+ false.
merged_name(#state{inputmodules=[]},ERef) ->
ERef;
@@ -2013,38 +1649,18 @@ merged_name(S,ERef=#'Externaltypereference'{module=M}) ->
ERef
end.
-oCFT_def(S,T) ->
- case get_OCFT_inner(S,T) of
- ERef=#'Externaltypereference'{} -> ERef;
- {Name,Type} -> #typedef{checked=true,name=Name,typespec=Type};
- 'ASN1_OPEN_TYPE' ->
- #typedef{checked=true,typespec=T#type{def='ASN1_OPEN_TYPE'}}
- end.
-
-get_OCFT_inner(_S,T) ->
-% Module=S#state.mname,
- Def = T#type.def,
- case Def#'ObjectClassFieldType'.type of
+ocft_def(#type{def=#'ObjectClassFieldType'{type=OCFT}}=T) ->
+ case OCFT of
{fixedtypevaluefield,_,InnerType} ->
case asn1ct_gen:type(asn1ct_gen:get_inner(InnerType#type.def)) of
- Bif when Bif=={primitive,bif};Bif=={constructed,bif} ->
- {Bif,InnerType};
- ERef = #'Externaltypereference'{} ->
- ERef
+ Bif when Bif =:= {primitive,bif}; Bif =:= {constructed,bif} ->
+ #typedef{checked=true,name=Bif,typespec=InnerType};
+ #'Externaltypereference'{}=Ref ->
+ Ref
end;
- 'ASN1_OPEN_TYPE' -> 'ASN1_OPEN_TYPE'
+ 'ASN1_OPEN_TYPE' ->
+ #typedef{checked=true,typespec=T#type{def='ASN1_OPEN_TYPE'}}
end.
-
-
-
-union_of_defed_objs({_,_,_ObjClass=#type{def=ClassDef},_},ObjFieldSetting) ->
- #typedef{typespec=#'ObjectSet'{class = ClassDef,
- set = ObjFieldSetting}};
-union_of_defed_objs({_,_,DefObjClassRef,_},ObjFieldSetting)
- when is_record(DefObjClassRef,'Externaltypereference') ->
- #typedef{typespec=#'ObjectSet'{class = DefObjClassRef,
- set = ObjFieldSetting}}.
-
check_value(OldS,V) when is_record(V,pvaluesetdef) ->
#pvaluesetdef{checked=Checked,type=Type} = V,
@@ -2068,8 +1684,7 @@ check_value(OldS,V) when is_record(V,typedef) ->
#typedef{typespec=TS} = V,
case TS of
#'ObjectSet'{class=ClassRef} ->
- {RefM,TSDef} = get_referenced_type(OldS,ClassRef),
- %%IsObjectSet(TSDef);
+ {_RefM,TSDef} = get_referenced_type(OldS, ClassRef),
case TSDef of
#classdef{} -> throw({objectsetdef});
#typedef{typespec=#type{def=Eref}} when
@@ -2077,14 +1692,12 @@ check_value(OldS,V) when is_record(V,typedef) ->
%% This case if the class reference is a defined
%% reference to class
check_value(OldS,V#typedef{typespec=TS#'ObjectSet'{class=Eref}});
- #typedef{} ->
+ #typedef{typespec=HostType} ->
% an ordinary value set with a type in #typedef.typespec
- ValueSet = TS#'ObjectSet'.set,
- Type=check_type(OldS,TSDef,TSDef#typedef.typespec),
- Value = check_value(OldS,#valuedef{type=Type,
- value=ValueSet,
- module=RefM}),
- {valueset,Type#type{constraint=Value#valuedef.value}}
+ ValueSet0 = TS#'ObjectSet'.set,
+ Constr = check_constraints(OldS, HostType, [ValueSet0]),
+ Type = check_type(OldS,TSDef,TSDef#typedef.typespec),
+ {valueset,Type#type{constraint=Constr}}
end;
_ ->
throw({objectsetdef})
@@ -2104,11 +1717,11 @@ check_value(S, #valuedef{}=V) ->
end.
check_valuedef(#state{recordtopname=TopName}=S0, V0) ->
- #valuedef{name=Name,type=Vtype,value=Value,module=ModName} = V0,
+ #valuedef{name=Name,type=Vtype0,value=Value,module=ModName} = V0,
V = V0#valuedef{checked=true},
+ Vtype = check_type(S0, #typedef{name=Name,typespec=Vtype0},Vtype0),
Def = Vtype#type.def,
- Constr = Vtype#type.constraint,
- S1 = S0#state{type=Vtype,tname=Def,value=V0,vname=Name},
+ S1 = S0#state{tname=Def},
SVal = update_state(S1, ModName),
case Def of
#'Externaltypereference'{type=RecName}=Ext ->
@@ -2116,9 +1729,8 @@ check_valuedef(#state{recordtopname=TopName}=S0, V0) ->
%% If V isn't a value but an object Type is a #classdef{}
S2 = update_state(S1, RefM),
case Type of
- #classdef{} ->
- throw({objectdef});
- #typedef{typespec=TypeSpec} ->
+ #typedef{typespec=TypeSpec0}=TypeDef ->
+ TypeSpec = check_type(S2, TypeDef, TypeSpec0),
S3 = case is_contextswitchtype(Type) of
true ->
S2;
@@ -2135,7 +1747,7 @@ check_valuedef(#state{recordtopname=TopName}=S0, V0) ->
V#valuedef{type=Type}),
V#valuedef{value=CheckedVal}
end;
- 'ANY' ->
+ 'ASN1_OPEN_TYPE' ->
{opentypefieldvalue,ANYType,ANYValue} = Value,
CheckedV = check_value(SVal,#valuedef{name=Name,
type=ANYType,
@@ -2143,19 +1755,12 @@ check_valuedef(#state{recordtopname=TopName}=S0, V0) ->
module=ModName}),
V#valuedef{value=CheckedV#valuedef.value};
'INTEGER' ->
- ok = validate_integer(SVal, Value, [], Constr),
V#valuedef{value=normalize_value(SVal, Vtype, Value, [])};
- {'INTEGER',NamedNumberList} ->
- ok = validate_integer(SVal, Value, NamedNumberList, Constr),
+ {'INTEGER',_NamedNumberList} ->
V#valuedef{value=normalize_value(SVal, Vtype, Value, [])};
#'SEQUENCE'{} ->
- {ok,SeqVal} = convert_external(SVal, Value),
+ {ok,SeqVal} = convert_external(SVal, Vtype, Value),
V#valuedef{value=normalize_value(SVal, Vtype, SeqVal, TopName)};
- {'SelectionType',SelName,SelT} ->
- CheckedT = check_selectiontype(SVal, SelName, SelT),
- NewV = V#valuedef{type=CheckedT},
- SelVDef = check_value(S1#state{value=NewV}, NewV),
- V#valuedef{value=SelVDef#valuedef.value};
_ ->
V#valuedef{value=normalize_value(SVal, Vtype, Value, TopName)}
end.
@@ -2169,179 +1774,97 @@ is_contextswitchtype(#typedef{name='CHARACTER STRING'}) ->
is_contextswitchtype(_) ->
false.
-% validate_integer(S,{identifier,Pos,Id},NamedNumberList,Constr) ->
-% case lists:keysearch(Id,1,NamedNumberList) of
-% {value,_} -> ok;
-% false -> error({value,"unknown NamedNumber",S})
-% end;
-%% This case occurs when there is a valuereference
-%% validate_integer(S=#state{mname=M},
-%% #'Externalvaluereference'{module=M,value=Id}=Ref,
-validate_integer(S,#'Externalvaluereference'{value=Id}=Ref,
- NamedNumberList,Constr) ->
- case lists:keysearch(Id,1,NamedNumberList) of
- {value,_} -> ok;
- false -> validate_integer_ref(S,Ref,NamedNumberList,Constr)
- %%error({value,"unknown NamedNumber",S})
- end;
-validate_integer(S,Id,NamedNumberList,Constr) when is_atom(Id) ->
- case lists:keysearch(Id,1,NamedNumberList) of
- {value,_} -> ok;
- false -> validate_integer_ref(S,Id,NamedNumberList,Constr)
- %error({value,"unknown NamedNumber",S})
+%%%
+%%% Start of OBJECT IDENTIFIER/RELATIVE-OID validation.
+%%%
+
+validate_objectidentifier(S, OidType, #'Externalvaluereference'{}=Id) ->
+ %% Must be an OBJECT IDENTIFIER or RELATIVE-OID depending on OidType.
+ get_oid_value(S, OidType, false, Id);
+validate_objectidentifier(S, OidType, {'ValueFromObject',{object,Obj},Fields}) ->
+ %% Must be an OBJECT IDENTIFIER/RELATIVE-OID depending on OidType.
+ case extract_field(S, Obj, Fields) of
+ #valuedef{checked=true,value=Value,type=Type} when is_tuple(Value) ->
+ _ = get_oid_type(S, OidType, Type),
+ Value;
+ _ ->
+ asn1_error(S, {illegal_oid,OidType})
end;
-validate_integer(_S,Value,_NamedNumberList,Constr) when is_integer(Value) ->
- check_integer_range(Value,Constr).
-
-validate_integer_ref(S,Id,_,_) when is_atom(Id) ->
- error({value,"unknown integer referens",S});
-validate_integer_ref(S,Ref,NamedNumberList,Constr) ->
- case get_referenced_type(S,Ref) of
- {M,V} when is_record(V,valuedef) ->
- NewS = update_state(S,M),
- case check_value(NewS,V) of
- #valuedef{type=#type{def='INTEGER'},value=Value} ->
- validate_integer(NewS,Value,NamedNumberList,Constr);
- _Err -> error({value,"unknown integer referens",S})
+validate_objectidentifier(S, OidType,
+ [{#seqtag{module=Mod,pos=Pos,val=Atom},Val}]) ->
+ %% This case is when an OBJECT IDENTIFIER value has been parsed as a
+ %% SEQUENCE value.
+ Rec = #'Externalvaluereference'{pos=Pos,
+ module=Mod,
+ value=Atom},
+ validate_oid(S, OidType, [Rec,Val], []);
+validate_objectidentifier(S, OidType, [_|_]=L0) ->
+ validate_oid(S, OidType, L0, []);
+validate_objectidentifier(S, OidType, _) ->
+ asn1_error(S, {illegal_oid,OidType}).
+
+get_oid_value(S, OidType, AllowInteger, #'Externalvaluereference'{}=Id) ->
+ case get_referenced_type(S, Id) of
+ {_,#valuedef{checked=Checked,type=Type,value=V}} ->
+ case get_oid_type(S, OidType, Type) of
+ 'INTEGER' when not AllowInteger ->
+ asn1_error(S, {illegal_oid,OidType});
+ _ when Checked ->
+ V;
+ 'INTEGER' ->
+ V;
+ _ ->
+ validate_objectidentifier(S, OidType, V)
end;
_ ->
- error({value,"unknown integer referens",S})
+ asn1_error(S, {illegal_oid,OidType})
end.
-
-
-
-check_integer_range(_Int, Constr) when is_list(Constr) ->
- ok.
-%%------------
-%% This can be removed when the old parser is removed
-%% The function removes 'space' atoms from the list
-
-is_space_list([H],Acc) ->
- lists:reverse([H|Acc]);
-is_space_list([H,space|T],Acc) ->
- is_space_list(T,[H|Acc]);
-is_space_list([],Acc) ->
- lists:reverse(Acc);
-is_space_list([H|T],Acc) ->
- is_space_list(T,[H|Acc]).
-
-validate_objectidentifier(S,OID,ERef,C)
- when is_record(ERef,'Externalvaluereference') ->
- validate_objectidentifier(S,OID,[ERef],C);
-validate_objectidentifier(S,OID,Tup,C) when is_tuple(Tup) ->
- validate_objectidentifier(S,OID,tuple_to_list(Tup),C);
-validate_objectidentifier(S,OID,L,_) ->
- NewL = is_space_list(L,[]),
- case validate_objectidentifier1(S,OID,NewL) of
- NewL2 when is_list(NewL2) ->{ok,list_to_tuple(NewL2)};
- Other -> {ok,Other}
- end.
-
-validate_objectidentifier1(S, OID, [Id|T])
- when is_record(Id,'Externalvaluereference') ->
- case catch get_referenced_type(S,Id) of
- {M,V} when is_record(V,valuedef) ->
- NewS = update_state(S,M),
- case check_value(NewS,V) of
- #valuedef{type=#type{def=ERef},checked=true,
- value=Value} when is_tuple(Value) ->
- case is_object_id(OID,NewS,ERef) of
- true ->
- %% T must be a RELATIVE-OID
- validate_oid(true,NewS, rel_oid, T, lists:reverse(tuple_to_list(Value)));
- _ ->
- error({value, {"illegal "++to_string(OID),[Id|T]}, S})
- end;
- _ ->
- error({value, {"illegal "++to_string(OID),[Id|T]}, S})
- end;
- _ ->
- validate_oid(true,S, OID, [Id|T], [])
- end;
-validate_objectidentifier1(S,OID,V) ->
- validate_oid(true,S,OID,V,[]).
-
-validate_oid(false, S, OID, V, Acc) ->
- error({value, {"illegal "++to_string(OID), V,Acc}, S});
-validate_oid(_,_, _, [], Acc) ->
- lists:reverse(Acc);
-validate_oid(_, S, OID, [Value|Vrest], Acc) when is_integer(Value) ->
- validate_oid(valid_objectid(OID,Value,Acc),S, OID, Vrest, [Value|Acc]);
-validate_oid(_, S, OID, [{'NamedNumber',_Name,Value}|Vrest], Acc)
+validate_oid(S, OidType, [], Acc) ->
+ Oid = lists:reverse(Acc),
+ validate_oid_path(S, OidType, Oid),
+ list_to_tuple(Oid);
+validate_oid(S, OidType, [Value|Vrest], Acc) when is_integer(Value) ->
+ validate_oid(S, OidType, Vrest, [Value|Acc]);
+validate_oid(S, OidType, [{'NamedNumber',_Name,Value}|Vrest], Acc)
when is_integer(Value) ->
- validate_oid(valid_objectid(OID,Value,Acc), S, OID, Vrest, [Value|Acc]);
-validate_oid(_, S, OID, [Id|Vrest], Acc)
- when is_record(Id,'Externalvaluereference') ->
- case catch get_referenced_type(S, Id) of
- {M,V} when is_record(V,valuedef) ->
- NewS = update_state(S,M),
- NewVal = case check_value(NewS, V) of
- #valuedef{checked=true,value=Value} ->
- fun(Int) when is_integer(Int) -> [Int];
- (L) when is_list(L) -> L;
- (T) when is_tuple(T) -> tuple_to_list(T)
- end (Value);
- _ ->
- error({value, {"illegal "++to_string(OID),
- [Id|Vrest],Acc}, S})
- end,
- case NewVal of
- List when is_list(List) ->
- validate_oid(valid_objectid(OID,NewVal,Acc), NewS,
- OID, Vrest,lists:reverse(NewVal)++Acc);
- _ ->
- NewVal
- end;
- _ ->
+ validate_oid(S, OidType, Vrest, [Value|Acc]);
+validate_oid(S, OidType, [#'Externalvaluereference'{}=Id|Vrest], Acc) ->
+ NeededOidType = case Acc of
+ [] -> o_id;
+ [_|_] -> rel_oid
+ end,
+ try get_oid_value(S, NeededOidType, true, Id) of
+ Val when is_integer(Val) ->
+ validate_oid(S, OidType, Vrest, [Val|Acc]);
+ Val when is_tuple(Val) ->
+ L = tuple_to_list(Val),
+ validate_oid(S, OidType, Vrest, lists:reverse(L, Acc))
+ catch
+ _:_ ->
case reserved_objectid(Id#'Externalvaluereference'.value, Acc) of
Value when is_integer(Value) ->
- validate_oid(valid_objectid(OID,Value,Acc),
- S, OID,Vrest, [Value|Acc]);
+ validate_oid(S, OidType,Vrest, [Value|Acc]);
false ->
- error({value, {"illegal "++to_string(OID),[Id,Vrest],Acc}, S})
+ asn1_error(S, {illegal_oid,OidType})
end
end;
-validate_oid(_, S, OID, [{#seqtag{module=Mod,val=Atom},Value}], [])
- when is_atom(Atom),is_integer(Value) ->
- %% this case when an OBJECT IDENTIFIER value has been parsed as a
- %% SEQUENCE value
- Rec = #'Externalvaluereference'{module=Mod,
- value=Atom},
- validate_objectidentifier1(S, OID, [Rec,Value]);
-validate_oid(_, S, OID, [{#seqtag{module=Mod,val=Atom},EVRef}], [])
- when is_atom(Atom),is_record(EVRef,'Externalvaluereference') ->
- %% this case when an OBJECT IDENTIFIER value has been parsed as a
- %% SEQUENCE value OTP-4354
- Rec = #'Externalvaluereference'{module=Mod,
- value=Atom},
- validate_objectidentifier1(S, OID, [Rec,EVRef]);
-validate_oid(_, S, OID, [#seqtag{module=Mod,val=Atom}|Rest], Acc)
- when is_atom(Atom) ->
- Rec = #'Externalvaluereference'{module=Mod,
- value=Atom},
- validate_oid(true,S, OID, [Rec|Rest],Acc);
-validate_oid(_, S, OID, V, Acc) ->
- error({value, {"illegal "++to_string(OID),V,Acc},S}).
-
-is_object_id(OID,S,ERef=#'Externaltypereference'{}) ->
- {_,OI} = get_referenced_type(S,ERef),
- is_object_id(OID,S,OI#typedef.typespec);
-is_object_id(o_id,_S,'OBJECT IDENTIFIER') ->
- true;
-is_object_id(rel_oid,_S,'RELATIVE-OID') ->
- true;
-is_object_id(_,_S,'INTEGER') ->
- true;
-is_object_id(OID,S,#type{def=Def}) ->
- is_object_id(OID,S,Def);
-is_object_id(_,_S,_) ->
- false.
-
-to_string(o_id) ->
- "OBJECT IDENTIFIER";
-to_string(rel_oid) ->
- "RELATIVE-OID".
+validate_oid(S, OidType, _V, _Acc) ->
+ asn1_error(S, {illegal_oid,OidType}).
+
+get_oid_type(S, OidType, #type{def=Def}) ->
+ get_oid_type(S, OidType, Def);
+get_oid_type(S, OidType, #'Externaltypereference'{}=Id) ->
+ {_,OI} = get_referenced_type(S, Id),
+ get_oid_type(S, OidType, OI#typedef.typespec);
+get_oid_type(_S, o_id, 'OBJECT IDENTIFIER'=T) ->
+ T;
+get_oid_type(_S, rel_oid, 'RELATIVE-OID'=T) ->
+ T;
+get_oid_type(_S, _, 'INTEGER'=T) ->
+ T;
+get_oid_type(S, OidType, _) ->
+ asn1_error(S, {illegal_oid,OidType}).
%% ITU-T Rec. X.680 Annex B - D
reserved_objectid('itu-t',[]) -> 0;
@@ -2380,7 +1903,6 @@ reserved_objectid('x',[0,0]) -> 24;
reserved_objectid('y',[0,0]) -> 25;
reserved_objectid('z',[0,0]) -> 26;
-
reserved_objectid(iso,[]) -> 1;
%% arcs below "iso", note that number 1 is not used
reserved_objectid('standard',[1]) -> 0;
@@ -2392,25 +1914,22 @@ reserved_objectid('joint-iso-ccitt',[]) -> 2;
reserved_objectid(_,_) -> false.
-valid_objectid(_OID,[],_Acc) ->
- true;
-valid_objectid(OID,[H|T],Acc) ->
- case valid_objectid(OID, H, Acc) of
- true ->
- valid_objectid(OID,T,[H|Acc]);
- _ ->
- false
- end;
-valid_objectid(o_id,I,[]) when I =:= 0; I =:= 1; I =:= 2 -> true;
-valid_objectid(o_id,_I,[]) -> false;
-valid_objectid(o_id,I,[0]) when I >= 0; I =< 4 -> true;
-valid_objectid(o_id,_I,[0]) -> false;
-valid_objectid(o_id,I,[1]) when I =:= 0; I =:= 2; I =:= 3 -> true;
-valid_objectid(o_id,_I,[1]) -> false;
-valid_objectid(o_id,_I,[2]) -> true;
-valid_objectid(_,_,_) -> true.
-
-convert_external(S=#state{type=Vtype}, Value) ->
+validate_oid_path(_, rel_oid, _) ->
+ ok;
+validate_oid_path(_, o_id, [0,I|_]) when 0 =< I, I =< 9 ->
+ ok;
+validate_oid_path(_, o_id, [1,I|_]) when 0 =< I, I =< 3 ->
+ ok;
+validate_oid_path(_, o_id, [2|_]) ->
+ ok;
+validate_oid_path(S, o_id=OidType, _) ->
+ asn1_error(S, {illegal_oid,OidType}).
+
+%%%
+%%% End of OBJECT IDENTIFIER/RELATIVE-OID validation.
+%%%
+
+convert_external(S, Vtype, Value) ->
case Vtype of
#type{tag=[{tag,'UNIVERSAL',8,'IMPLICIT',32}]} ->
%% this is an 'EXTERNAL' (or INSTANCE OF)
@@ -2435,7 +1954,7 @@ to_EXTERNAL1990(S, [{#seqtag{val=identification}=T,
to_EXTERNAL1990(S, Rest, [{T#seqtag{val='indirect-reference'},PCid},
{T#seqtag{val='direct-reference'},TrStx}]);
to_EXTERNAL1990(S, _) ->
- error({value,"illegal value in EXTERNAL type",S}).
+ asn1_error(S, illegal_external_value).
to_EXTERNAL1990(S, [V={#seqtag{val='data-value-descriptor'},_}|Rest], Acc) ->
to_EXTERNAL1990(S, Rest, [V|Acc]);
@@ -2443,7 +1962,7 @@ to_EXTERNAL1990(_S, [{#seqtag{val='data-value'}=T,Val}], Acc) ->
Encoding = {T#seqtag{val=encoding},{'CHOICE',{'octet-aligned',Val}}},
lists:reverse([Encoding|Acc]);
to_EXTERNAL1990(S, _, _) ->
- error({value,"illegal value in EXTERNAL type",S}).
+ asn1_error(S, illegal_external_value).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Functions to normalize the default values of SEQUENCE
@@ -2453,17 +1972,16 @@ normalize_value(_,_,mandatory,_) ->
mandatory;
normalize_value(_,_,'OPTIONAL',_) ->
'OPTIONAL';
-normalize_value(S0, Type, {'DEFAULT',Value}, NameList) ->
- S = S0#state{value=Value},
+normalize_value(S, Type, {'DEFAULT',Value}, NameList) ->
case catch get_canonic_type(S,Type,NameList) of
{'BOOLEAN',CType,_} ->
normalize_boolean(S,Value,CType);
{'INTEGER',CType,_} ->
- normalize_integer(S,Value,CType);
+ normalize_integer(S, Value, CType);
{'BIT STRING',CType,_} ->
normalize_bitstring(S,Value,CType);
- {'OCTET STRING',CType,_} ->
- normalize_octetstring(S0, Value, CType);
+ {'OCTET STRING',_,_} ->
+ normalize_octetstring(S, Value);
{'NULL',_CType,_} ->
%%normalize_null(Value);
'NULL';
@@ -2499,39 +2017,41 @@ normalize_value(S0, Type, {'DEFAULT',Value}, NameList) ->
normalize_value(S,Type,Val,NameList) ->
normalize_value(S,Type,{'DEFAULT',Val},NameList).
-normalize_boolean(S,{Name,Bool},CType) when is_atom(Name) ->
- normalize_boolean(S,Bool,CType);
normalize_boolean(_,true,_) ->
true;
normalize_boolean(_,false,_) ->
false;
normalize_boolean(S,Bool=#'Externalvaluereference'{},CType) ->
get_normalized_value(S,Bool,CType,fun normalize_boolean/3,[]);
-normalize_boolean(_,Other,_) ->
- throw({error,{asn1,{'invalid default value',Other}}}).
+normalize_boolean(S, _, _) ->
+ asn1_error(S, {illegal_value, "BOOLEAN"}).
-normalize_integer(_S,Int,_) when is_integer(Int) ->
+normalize_integer(_S, Int, _) when is_integer(Int) ->
Int;
-normalize_integer(_S,{Name,Int},_) when is_atom(Name),is_integer(Int) ->
- Int;
-normalize_integer(S,{Name,Int=#'Externalvaluereference'{}},
- Type) when is_atom(Name) ->
- normalize_integer(S,Int,Type);
-normalize_integer(S,Int=#'Externalvaluereference'{value=Name},Type) ->
- case Type of
- NNL when is_list(NNL) ->
- case lists:keysearch(Name,1,NNL) of
- {value,{Name,Val}} ->
+normalize_integer(S, #'Externalvaluereference'{value=Name}=Ref, NNL) ->
+ case lists:keyfind(Name, 1, NNL) of
+ {Name,Val} ->
+ Val;
+ false ->
+ try get_referenced_value(S, Ref) of
+ Val when is_integer(Val) ->
Val;
- false ->
- get_normalized_value(S,Int,Type,
- fun normalize_integer/3,[])
- end;
+ _ ->
+ asn1_error(S, illegal_integer_value)
+ catch
+ throw:_ ->
+ asn1_error(S, illegal_integer_value)
+ end
+ end;
+normalize_integer(S, {'ValueFromObject',{object,Obj},FieldNames}, _) ->
+ case extract_field(S, Obj, FieldNames) of
+ #valuedef{value=Val} when is_integer(Val) ->
+ Val;
_ ->
- get_normalized_value(S,Int,Type,fun normalize_integer/3,[])
+ asn1_error(S, illegal_integer_value)
end;
-normalize_integer(_,Int,_) ->
- exit({'Unknown INTEGER value',Int}).
+normalize_integer(S, _, _) ->
+ asn1_error(S, illegal_integer_value).
%% normalize_bitstring(S, Value, Type) -> bitstring()
%% Convert a literal value for a BIT STRING to an Erlang bit string.
@@ -2543,36 +2063,34 @@ normalize_bitstring(S, Value, Type)->
{bstring,String} when is_list(String) ->
bstring_to_bitstring(String);
#'Externalvaluereference'{} ->
- get_normalized_value(S, Value, Type,
- fun normalize_bitstring/3, []);
- RecList when is_list(RecList) ->
- F = fun(#'Externalvaluereference'{value=Name}) ->
- case lists:keymember(Name, 1, Type) of
- true -> Name;
- false -> throw({error,false})
- end;
- (Name) when is_atom(Name) ->
- %% Already normalized.
- Name;
- (Other) ->
- throw({error,Other})
- end,
- try
- lists:map(F, RecList)
- catch
- throw:{error,Reason} ->
- asn1ct:warning("default value not "
- "compatible with type definition ~p~n",
- [Reason],S,
- "default value not "
- "compatible with type definition"),
- Value
+ Val = get_referenced_value(S, Value),
+ normalize_bitstring(S, Val, Type);
+ {'ValueFromObject',{object,Obj},FieldNames} ->
+ case extract_field(S, Obj, FieldNames) of
+ #valuedef{value=Val} ->
+ normalize_bitstring(S, Val, Type);
+ _ ->
+ asn1_error(S, {illegal_value, "BIT STRING"})
end;
+ RecList when is_list(RecList) ->
+ [normalize_bs_item(S, Item, Type) || Item <- RecList];
Bs when is_bitstring(Bs) ->
%% Already normalized.
- Bs
+ Bs;
+ _ ->
+ asn1_error(S, {illegal_value, "BIT STRING"})
end.
+normalize_bs_item(S, #'Externalvaluereference'{value=Name}, Type) ->
+ case lists:keymember(Name, 1, Type) of
+ true -> Name;
+ false -> asn1_error(S, {illegal_value, "BIT STRING"})
+ end;
+normalize_bs_item(_, Atom, _) when is_atom(Atom) ->
+ Atom;
+normalize_bs_item(S, _, _) ->
+ asn1_error(S, {illegal_value, "BIT STRING"}).
+
hstring_to_binary(L) ->
byte_align(hstring_to_bitstring(L)).
@@ -2600,29 +2118,35 @@ hex_to_int(D) when $A =< D, D =< $F -> D - ($A - 10).
%% {bstring,String} each element in String corresponds to one bit in an octet
%% {hstring,String} each element in String corresponds to one byte in an octet
%% #'Externalvaluereference'
-normalize_octetstring(S,Value,CType) ->
+normalize_octetstring(S, Value) ->
case Value of
{bstring,String} ->
bstring_to_binary(String);
{hstring,String} ->
hstring_to_binary(String);
- Rec when is_record(Rec,'Externalvaluereference') ->
- get_normalized_value(S,Value,CType,
- fun normalize_octetstring/3,[]);
- {Name,String} when is_atom(Name) ->
- normalize_octetstring(S,String,CType);
+ #'Externalvaluereference'{} ->
+ case get_referenced_value(S, Value) of
+ String when is_binary(String) ->
+ String;
+ Other ->
+ normalize_octetstring(S, Other)
+ end;
+ {'ValueFromObject',{object,Obj},FieldNames} ->
+ case extract_field(S, Obj, FieldNames) of
+ #valuedef{value=Val} when is_binary(Val) ->
+ Val;
+ _ ->
+ asn1_error(S, illegal_octet_string_value)
+ end;
_ ->
- Item = S#state.value,
- asn1_error(S, Item, illegal_octet_string_value)
+ asn1_error(S, illegal_octet_string_value)
end.
normalize_objectidentifier(S, Value) ->
- {ok,Val} = validate_objectidentifier(S, o_id, Value, []),
- Val.
+ validate_objectidentifier(S, o_id, Value).
-normalize_relative_oid(S,Value) ->
- {ok,Val} = validate_objectidentifier(S, rel_oid, Value, []),
- Val.
+normalize_relative_oid(S, Value) ->
+ validate_objectidentifier(S, rel_oid, Value).
normalize_objectdescriptor(Value) ->
Value.
@@ -2644,40 +2168,22 @@ lookup_enum_value(S, Id, NNL) when is_atom(Id) ->
{_,_}=Ret ->
Ret;
false ->
- asn1_error(S, S#state.value, {undefined,Id})
+ asn1_error(S, {undefined,Id})
end.
-normalize_choice(S,{'CHOICE',{C,V}},CType,NameList) when is_atom(C) ->
- case catch lists:keysearch(C,#'ComponentType'.name,CType) of
- {value,#'ComponentType'{typespec=CT,name=Name}} ->
- {C,normalize_value(S,CT,{'DEFAULT',V},
- [Name|NameList])};
- Other ->
- asn1ct:warning("Wrong format of type/value ~p/~p~n",[Other,V],S,
- "Wrong format of type/value"),
- {C,V}
+normalize_choice(S, {'CHOICE',{C,V}}, CType, NameList)
+ when is_atom(C) ->
+ case lists:keyfind(C, #'ComponentType'.name, CType) of
+ #'ComponentType'{typespec=CT,name=Name} ->
+ {C,normalize_value(S, CT, {'DEFAULT',V}, [Name|NameList])};
+ false ->
+ asn1_error(S, {illegal_id,C})
end;
-normalize_choice(S,{'DEFAULT',ValueList},CType,NameList) when is_list(ValueList) ->
- lists:map(fun(X)-> normalize_choice(S,X,CType,NameList) end, ValueList);
-normalize_choice(S,Val=#'Externalvaluereference'{},CType,NameList) ->
- {M,#valuedef{value=V}}=get_referenced_type(S,Val),
- normalize_choice(update_state(S,M),{'CHOICE',V},CType,NameList);
-% get_normalized_value(S,Val,CType,fun normalize_choice/4,[NameList]);
-normalize_choice(S,CV={Name,_ChoiceVal},CType,NameList)
+normalize_choice(S,CV={Name,_ChoiceVal},CType,NameList)
when is_atom(Name) ->
-% normalize_choice(S,ChoiceVal,CType,NameList).
normalize_choice(S,{'CHOICE',CV},CType,NameList);
-normalize_choice(_S,V,_CType,_NameList) ->
- exit({error,{bad_choice_value,V}}).
-
-%% normalize_choice(NameList,S,CVal = {'CHOICE',{_,_}},CType,_) ->
-%% normalize_choice(S,CVal,CType,NameList);
-%% normalize_choice(NameList,S,CVal={'DEFAULT',VL},CType,_) when is_list(VL)->
-%% normalize_choice(S,CVal,CType,NameList);
-%% normalize_choice(NameList,S,CV={Name,_CV},CType,_) when is_atom(Name)->
-%% normalize_choice(S,{'CHOICE',CV},CType,NameList);
-%% normalize_choice(_,_S,V,_,_) ->
-%% V.
+normalize_choice(S, V, _CType, _NameList) ->
+ asn1_error(S, {illegal_id, error_value(V)}).
normalize_sequence(S,Value,Components,NameList)
when is_tuple(Components) ->
@@ -2732,12 +2238,9 @@ normalized_record(SorS,S,Value,Components,NameList) ->
Value;
_ ->
NoComps = length(Components),
- case normalize_seq_or_set(SorS,S,Value,Components,NameList,[]) of
- ListOfVals when length(ListOfVals) == NoComps ->
- list_to_tuple([NewName|ListOfVals]);
- _ ->
- error({type,{illegal,default,value,Value},S})
- end
+ ListOfVals = normalize_seq_or_set(SorS,S,Value,Components,NameList,[]),
+ NoComps = length(ListOfVals), %% Assert
+ list_to_tuple([NewName|ListOfVals])
end.
is_record_normalized(S,Name,V = #'Externalvaluereference'{},NumComps) ->
case get_referenced_type(S,V) of
@@ -2750,10 +2253,11 @@ is_record_normalized(_S,Name,Value,NumComps) when is_tuple(Value) ->
is_record_normalized(_,_,_,_) ->
false.
-normalize_seq_or_set(SorS, S, [{#seqtag{val=Cname},V}|Vs],
+normalize_seq_or_set(SorS, S,
+ [{#seqtag{val=Cname},V}|Vs],
[#'ComponentType'{name=Cname,typespec=TS}|Cs],
NameList, Acc) ->
- NewNameList =
+ NewNameList =
case TS#type.def of
#'Externaltypereference'{type=TName} ->
[TName];
@@ -2761,24 +2265,26 @@ normalize_seq_or_set(SorS, S, [{#seqtag{val=Cname},V}|Vs],
end,
NVal = normalize_value(S,TS,{'DEFAULT',V},NewNameList),
normalize_seq_or_set(SorS,S,Vs,Cs,NameList,[NVal|Acc]);
-normalize_seq_or_set(SorS,S,Values=[{_Cname1,_V}|_Vs],
+normalize_seq_or_set(SorS, S,
+ Values=[{#seqtag{val=Cname0},_V}|_Vs],
[#'ComponentType'{prop='OPTIONAL'}|Cs],
- NameList,Acc) ->
+ NameList, Acc) ->
+ verify_valid_component(S, Cname0, Cs),
normalize_seq_or_set(SorS,S,Values,Cs,NameList,[asn1_NOVALUE|Acc]);
-normalize_seq_or_set(SorS,S,Values=[{_Cname1,_V}|_Vs],
- [#'ComponentType'{name=Cname2,typespec=TS,
- prop={'DEFAULT',Value}}|Cs],
- NameList,Acc) ->
- NewNameList =
+normalize_seq_or_set(SorS, S,
+ Values=[{#seqtag{val=Cname0},_V}|_Vs],
+ [#'ComponentType'{name=Cname,typespec=TS,
+ prop={'DEFAULT',Value}}|Cs],
+ NameList, Acc) ->
+ verify_valid_component(S, Cname0, Cs),
+ NewNameList =
case TS#type.def of
#'Externaltypereference'{type=TName} ->
[TName];
- _ -> [Cname2|NameList]
+ _ -> [Cname|NameList]
end,
NVal = normalize_value(S,TS,{'DEFAULT',Value},NewNameList),
normalize_seq_or_set(SorS,S,Values,Cs,NameList,[NVal|Acc]);
-normalize_seq_or_set(_SorS,_S,[],[],_,Acc) ->
- lists:reverse(Acc);
%% If default value is {} ComponentTypes in SEQUENCE are marked DEFAULT
%% or OPTIONAL (or the type is defined SEQUENCE{}, which is handled by
%% the previous case).
@@ -2801,9 +2307,23 @@ normalize_seq_or_set(SorS,S,Value=#'Externalvaluereference'{},
Cs,NameList,Acc) ->
get_normalized_value(S,Value,Cs,fun normalize_seq_or_set/6,
[SorS,NameList,Acc]);
-normalize_seq_or_set(_SorS,S,V,_,_,_) ->
- error({type,{illegal,default,value,V},S}).
-
+normalize_seq_or_set(_SorS, _S, [], [], _, Acc) ->
+ lists:reverse(Acc);
+normalize_seq_or_set(_SorS, S, V, Cs, _, _) ->
+ case V of
+ [{#seqtag{val=Name},_}|_] ->
+ asn1_error(S, {illegal_id,error_value(Name)});
+ [] ->
+ [#'ComponentType'{name=Name}|_] = Cs,
+ asn1_error(S, {missing_id,error_value(Name)})
+ end.
+
+verify_valid_component(S, Name, Cs) ->
+ case lists:keyfind(Name, #'ComponentType'.name, Cs) of
+ false -> asn1_error(S, {illegal_id,error_value(Name)});
+ #'ComponentType'{} -> ok
+ end.
+
normalize_seqof(S,Value,Type,NameList) ->
normalize_s_of('SEQUENCE OF',S,Value,Type,NameList).
@@ -2859,10 +2379,7 @@ normalize_restrictedstring(_S,CString,_) when is_list(CString) ->
%% definedvalue case or argument in a parameterized type
normalize_restrictedstring(S,ERef,CType) when is_record(ERef,'Externalvaluereference') ->
get_normalized_value(S,ERef,CType,
- fun normalize_restrictedstring/3,[]);
-%%
-normalize_restrictedstring(S,{Name,Val},CType) when is_atom(Name) ->
- normalize_restrictedstring(S,Val,CType).
+ fun normalize_restrictedstring/3,[]).
normalize_objectclassfieldvalue(S,{opentypefieldvalue,Type,Value},NameList) ->
%% An open type has per definition no type. Thus should the type
@@ -2910,6 +2427,8 @@ call_Func(S,Val,Type,Func,ArgList) ->
get_canonic_type(S,Type,NameList) ->
{InnerType,NewType,NewNameList} =
case Type#type.def of
+ 'INTEGER'=Name ->
+ {Name,[],NameList};
Name when is_atom(Name) ->
{Name,Type,NameList};
Ref when is_record(Ref,'Externaltypereference') ->
@@ -2964,8 +2483,8 @@ check_formal_parameter(_, {_,_}) ->
ok;
check_formal_parameter(_, #'Externaltypereference'{}) ->
ok;
-check_formal_parameter(S, #'Externalvaluereference'{value=Name}=Ref) ->
- asn1_error(S, Ref, {illegal_typereference,Name}).
+check_formal_parameter(S, #'Externalvaluereference'{value=Name}) ->
+ asn1_error(S, {illegal_typereference,Name}).
% check_type(S,Type,ObjSpec={{objectclassname,_},_}) ->
% check_class(S,ObjSpec);
@@ -2977,7 +2496,7 @@ check_type(_S,Type,Ts) when is_record(Type,typedef),
Ts;
check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) ->
{Def,Tag,Constr,IsInlined} =
- case match_parameters(S,Ts#type.def,S#state.parameters) of
+ case match_parameter(S, Ts#type.def) of
#type{tag=PTag,constraint=_Ctmp,def=Dtmp,inlined=Inl} ->
{Dtmp,merge_tags(Ts#type.tag,PTag),Ts#type.constraint,Inl};
#typedef{typespec=#type{tag=PTag,def=Dtmp,inlined=Inl}} ->
@@ -2989,16 +2508,16 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) ->
inlined=IsInlined},
TestFun =
fun(Tref) ->
- MaybeChoice = get_non_typedef(S, Tref),
+ {_, MaybeChoice} = get_referenced_type(S, Tref, true),
case catch((MaybeChoice#typedef.typespec)#type.def) of
{'CHOICE',_} ->
- maybe_illicit_implicit_tag(choice,Tag);
+ maybe_illicit_implicit_tag(S, choice, Tag);
'ANY' ->
- maybe_illicit_implicit_tag(open_type,Tag);
+ maybe_illicit_implicit_tag(S, open_type, Tag);
'ANY DEFINED BY' ->
- maybe_illicit_implicit_tag(open_type,Tag);
+ maybe_illicit_implicit_tag(S, open_type, Tag);
'ASN1_OPEN_TYPE' ->
- maybe_illicit_implicit_tag(open_type,Tag);
+ maybe_illicit_implicit_tag(S, open_type, Tag);
_ ->
Tag
end
@@ -3007,7 +2526,7 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) ->
case Def of
Ext when is_record(Ext,'Externaltypereference') ->
{RefMod,RefTypeDef,IsParamDef} =
- case get_referenced_type(S,Ext) of
+ case get_referenced_type(S, Ext) of
{undefined,TmpTDef} -> %% A parameter
{get(top_module),TmpTDef,true};
{TmpRefMod,TmpRefDef} ->
@@ -3031,7 +2550,6 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) ->
NewS = S#state{mname=RefMod,
module=load_asn1_module(S,RefMod),
tname=get_datastr_name(NewRefTypeDef1),
- type=NewRefTypeDef1,
abscomppath=[],recordtopname=[]},
RefType1 =
check_type(NewS,RefTypeDef,RefTypeDef#typedef.typespec),
@@ -3051,18 +2569,17 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) ->
Key);
_ -> ok
end,
+ Pos = Ext#'Externaltypereference'.pos,
{RefType1,#'Externaltypereference'{module=RefMod,
+ pos=Pos,
type=TmpName}}
end,
case asn1ct_gen:prim_bif(asn1ct_gen:get_inner(RefType#type.def)) of
true ->
%% Here we expand to a built in type and inline it
- NewS2 = S#state{type=#typedef{typespec=RefType}},
- NewC =
- constraint_merge(NewS2,
- check_constraints(NewS2,Constr)++
- RefType#type.constraint),
+ NewC = check_constraints(S, RefType, Constr ++
+ RefType#type.constraint),
TempNewDef#newt{
type = RefType#type.def,
tag = merge_tags(Ct,RefType#type.tag),
@@ -3073,19 +2590,13 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) ->
NewExt = ExtRef#'Externaltypereference'{module=merged_mod(S,RefMod,Ext)},
TempNewDef#newt{
type = check_externaltypereference(S,NewExt),
- tag = case S#state.erule of
- ber ->
- merge_tags(Ct,RefType#type.tag);
- _ ->
- Ct
- end
- }
+ tag = merge_tags(Ct,RefType#type.tag)}
end;
'ANY' ->
- Ct=maybe_illicit_implicit_tag(open_type,Tag),
+ Ct = maybe_illicit_implicit_tag(S, open_type, Tag),
TempNewDef#newt{type='ASN1_OPEN_TYPE',tag=Ct};
{'ANY_DEFINED_BY',_} ->
- Ct=maybe_illicit_implicit_tag(open_type,Tag),
+ Ct = maybe_illicit_implicit_tag(S, open_type, Tag),
TempNewDef#newt{type='ASN1_OPEN_TYPE',tag=Ct};
'INTEGER' ->
TempNewDef#newt{tag=
@@ -3132,7 +2643,7 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) ->
{'ENUMERATED',NamedNumberList} ->
TempNewDef#newt{type=
{'ENUMERATED',
- check_enumerated(S,NamedNumberList,Constr)},
+ check_enumerated(S, NamedNumberList)},
tag=
merge_tags(Tag,?TAG_PRIMITIVE(?N_ENUMERATED)),
constraint=[]};
@@ -3235,7 +2746,7 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) ->
tag=
merge_tags(Tag,?TAG_CONSTRUCTED(?N_SEQUENCE))};
{'CHOICE',Components} ->
- Ct = maybe_illicit_implicit_tag(choice,Tag),
+ Ct = maybe_illicit_implicit_tag(S, choice, Tag),
TempNewDef#newt{type={'CHOICE',check_choice(S,Type,Components)},tag=Ct};
Set when is_record(Set,'SET') ->
RecordName=
@@ -3258,12 +2769,6 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) ->
TempNewDef#newt{type={'SET OF',check_setof(S,Type,Components)},
tag=
merge_tags(Tag,?TAG_CONSTRUCTED(?N_SET))};
- %% This is a temporary hack until the full Information Obj Spec
- %% in X.681 is supported
- {#'Externaltypereference'{type='TYPE-IDENTIFIER'},
- [{typefieldreference,_,'Type'}]} ->
- Ct=maybe_illicit_implicit_tag(open_type,Tag),
- TempNewDef#newt{type='ASN1_OPEN_TYPE',tag=Ct};
{pt,Ptype,ParaList} ->
%% Ptype might be a parameterized - type, object set or
@@ -3271,18 +2776,18 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) ->
%% calling function.
{_RefMod,Ptypedef} = get_referenced_type(S,Ptype),
notify_if_not_ptype(S,Ptypedef),
- NewParaList =
- [match_parameters(S,TmpParam,S#state.parameters)||
- TmpParam <- ParaList],
+ NewParaList = match_parameters(S, ParaList),
Instance = instantiate_ptype(S,Ptypedef,NewParaList),
TempNewDef#newt{type=Instance#type.def,
tag=merge_tags(Tag,Instance#type.tag),
constraint=Instance#type.constraint,
inlined=yes};
- OCFT=#'ObjectClassFieldType'{classname=ClRef} ->
+ #'ObjectClassFieldType'{classname=ClRef0}=OCFT0 ->
%% this case occures in a SEQUENCE when
%% the type of the component is a ObjectClassFieldType
+ ClRef = match_parameter(S, ClRef0),
+ OCFT = OCFT0#'ObjectClassFieldType'{classname=ClRef},
ClassSpec = check_class(S,ClRef),
NewTypeDef =
maybe_open_type(S,ClassSpec,
@@ -3292,16 +2797,18 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) ->
Ct =
case is_open_type(NewTypeDef) of
true ->
- maybe_illicit_implicit_tag(open_type,MergedTag);
+ maybe_illicit_implicit_tag(S, open_type, MergedTag);
_ ->
MergedTag
end,
case TopName of
[] when Type#typedef.name =/= undefined ->
%% This is a top-level type.
- #type{def=Simplified} =
- simplify_type(#type{def=NewTypeDef}),
- TempNewDef#newt{type=Simplified,tag=Ct};
+ #type{constraint=C,def=Simplified} =
+ simplify_type(#type{def=NewTypeDef,
+ constraint=Constr}),
+ TempNewDef#newt{type=Simplified,tag=Ct,
+ constraint=C};
_ ->
TempNewDef#newt{type=NewTypeDef,tag=Ct}
end;
@@ -3311,33 +2818,21 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) ->
TempNewDef#newt{tag=merge_tags(Tag,CheckedT#type.tag),
type=CheckedT#type.def};
- {valueset,Vtype} ->
- TempNewDef#newt{type={valueset,check_type(S,Type,Vtype)}};
{'SelectionType',Name,T} ->
CheckedT = check_selectiontype(S,Name,T),
TempNewDef#newt{tag=merge_tags(Tag,CheckedT#type.tag),
type=CheckedT#type.def};
- Other ->
- exit({'cant check' ,Other})
+ 'ASN1_OPEN_TYPE' ->
+ TempNewDef
end,
#newt{type=TDef,tag=NewTags,constraint=NewConstr,inlined=Inlined} = NewDef,
Ts#type{def=TDef,
inlined=Inlined,
- constraint=check_constraints(S, NewConstr),
+ constraint=check_constraints(S, #type{def=TDef}, NewConstr),
tag=lists:map(fun(#tag{type={default,TTx}}=TempTag) ->
TempTag#tag{type=TTx};
(Other) -> Other
- end, NewTags)};
-check_type(_S,Type,Ts) ->
- exit({error,{asn1,internal_error,Type,Ts}}).
-
-get_non_typedef(S, Tref0) ->
- case get_referenced_type(S, Tref0) of
- {_,#typedef{typespec=#type{def=#'Externaltypereference'{}=Tref}}} ->
- get_non_typedef(S, Tref);
- {_,Type} ->
- Type
- end.
+ end, NewTags)}.
%%
@@ -3353,10 +2848,11 @@ simplify_comp(#'ComponentType'{typespec=Type0}=C) ->
C#'ComponentType'{typespec=Type};
simplify_comp(Other) -> Other.
-simplify_type(#type{tag=Tag,def=Inner}=T) ->
+simplify_type(#type{tag=Tag,def=Inner,constraint=Constr0}=T) ->
case Inner of
- #'ObjectClassFieldType'{type={fixedtypevaluefield,_,Type}} ->
- Type#type{tag=Tag};
+ #'ObjectClassFieldType'{type={fixedtypevaluefield,_,Type}}=OCFT ->
+ Constr = [{ocft,OCFT}|Type#type.constraint++Constr0],
+ Type#type{tag=Tag,constraint=Constr};
_ ->
T
end.
@@ -3389,29 +2885,22 @@ get_innertag(_S,#'ObjectClassFieldType'{type=Type}) ->
_ -> []
end.
-get_type_from_object(S,Object,TypeField)
- when is_record(Object,'Externaltypereference');
- is_record(Object,'Externalvaluereference') ->
- {_,ObjectDef} = get_referenced_type(S,Object),
- ObjSpec = check_object(S,ObjectDef,ObjectDef#typedef.typespec),
- get_fieldname_element(S,ObjectDef#typedef{typespec=ObjSpec},TypeField).
-
%% get_class_def(S, Type) -> #classdef{} | 'none'.
get_class_def(S, #typedef{typespec=#type{def=#'Externaltypereference'{}=Eref}}) ->
- {_,NextDef} = get_referenced_type(S, Eref),
+ {_,NextDef} = get_referenced_type(S, Eref, true),
get_class_def(S, NextDef);
get_class_def(S, #'Externaltypereference'{}=Eref) ->
- {_,NextDef} = get_referenced_type(S, Eref),
+ {_,NextDef} = get_referenced_type(S, Eref, true),
get_class_def(S, NextDef);
get_class_def(_S, #classdef{}=CD) ->
CD;
get_class_def(_S, _) ->
none.
-maybe_illicit_implicit_tag(Kind,Tag) ->
+maybe_illicit_implicit_tag(S, Kind, Tag) ->
case Tag of
[#tag{type='IMPLICIT'}|_T] ->
- throw({error,{asn1,{implicit_tag_before,Kind}}});
+ asn1_error(S, {implicit_tag_before,Kind});
[ChTag = #tag{type={default,_}}|T] ->
case Kind of
open_type ->
@@ -3438,19 +2927,24 @@ merged_mod(S,RefMod,Ext) ->
%% any UNIQUE field, so that a component relation constraint cannot specify
%% the type of a typefield, return 'ASN1_OPEN_TYPE'.
%%
-maybe_open_type(S,ClassSpec=#objectclass{fields=Fs},
- OCFT=#'ObjectClassFieldType'{fieldname=FieldRefList},
+maybe_open_type(_, _, #'ObjectClassFieldType'{fieldname={_,_}}=OCFT, _) ->
+ %% Already converted.
+ OCFT;
+maybe_open_type(S, #objectclass{fields=Fs}=ClassSpec,
+ #'ObjectClassFieldType'{fieldname=FieldRefList}=OCFT,
Constr) ->
- Type = get_ObjectClassFieldType(S,Fs,FieldRefList),
- FieldNames=get_referenced_fieldname(FieldRefList),
- case last_fieldname(FieldRefList) of
+ Type = get_OCFType(S, Fs, FieldRefList),
+ FieldNames = get_referenced_fieldname(FieldRefList),
+ case lists:last(FieldRefList) of
{valuefieldreference,_} ->
OCFT#'ObjectClassFieldType'{fieldname=FieldNames,
type=Type};
{typefieldreference,_} ->
- case {catch get_unique_fieldname(S,#classdef{typespec=ClassSpec}),
- asn1ct_gen:get_constraint(Constr,componentrelation)}of
- {Tuple,_} when tuple_size(Tuple) =:= 3 ->
+ %% Note: The constraints have not been checked yet,
+ %% so we must use a special lookup routine.
+ case {get_unique_fieldname(S, #classdef{typespec=ClassSpec}),
+ get_componentrelation(Constr)} of
+ {no_unique,_} ->
OCFT#'ObjectClassFieldType'{fieldname=FieldNames,
type='ASN1_OPEN_TYPE'};
{_,no} ->
@@ -3462,16 +2956,12 @@ maybe_open_type(S,ClassSpec=#objectclass{fields=Fs},
end
end.
-last_fieldname(FieldRefList) when is_list(FieldRefList) ->
- lists:last(FieldRefList);
-last_fieldname({FieldName,_}) when is_atom(FieldName) ->
- [A|_] = atom_to_list(FieldName),
- case is_lowercase(A) of
- true ->
- {valuefieldreference,FieldName};
- _ ->
- {typefieldreference,FieldName}
- end.
+get_componentrelation([{element_set,{componentrelation,_,_}=Cr,none}|_]) ->
+ Cr;
+get_componentrelation([_|T]) ->
+ get_componentrelation(T);
+get_componentrelation([]) ->
+ no.
is_open_type(#'ObjectClassFieldType'{type='ASN1_OPEN_TYPE'}) ->
true;
@@ -3510,35 +3000,19 @@ notify_if_not_ptype(S,#pobjectsetdef{class=Cl}) ->
_ ->
throw(pobjectsetdef)
end;
-notify_if_not_ptype(_S,PT) ->
- throw({error,{"supposed to be a parameterized type",PT}}).
-% fix me
+notify_if_not_ptype(S, PT) ->
+ asn1_error(S, {param_bad_type, error_value(PT)}).
+
instantiate_ptype(S,Ptypedef,ParaList) ->
#ptypedef{args=Args,typespec=Type} = Ptypedef,
NewType = check_ptype(S,Ptypedef,Type#type{inlined=yes}),
MatchedArgs = match_args(S,Args, ParaList, []),
OldArgs = S#state.parameters,
- NewS = S#state{type=NewType,parameters=MatchedArgs++OldArgs,abscomppath=[]},
-%% NewS = S#state{type=NewType,parameters=MatchedArgs,abscomppath=[]},
+ NewS = S#state{parameters=MatchedArgs++OldArgs,abscomppath=[]},
check_type(NewS, Ptypedef#ptypedef{typespec=NewType}, NewType).
-get_datastr_name(#typedef{name=N}) ->
- N;
-get_datastr_name(#classdef{name=N}) ->
- N;
-get_datastr_name(#valuedef{name=N}) ->
- N;
-get_datastr_name(#ptypedef{name=N}) ->
- N;
-get_datastr_name(#pvaluedef{name=N}) ->
- N;
-get_datastr_name(#pvaluesetdef{name=N}) ->
- N;
-get_datastr_name(#pobjectdef{name=N}) ->
- N;
-get_datastr_name(#pobjectsetdef{name=N}) ->
- N.
-
+get_datastr_name(Type) ->
+ asn1ct:get_name_of_def(Type).
get_pt_args(#ptypedef{args=Args}) ->
Args;
@@ -3606,8 +3080,8 @@ match_args(S,FA = [FormArg|Ft], AA = [ActArg|At], Acc) ->
end;
match_args(_S,[], [], Acc) ->
lists:reverse(Acc);
-match_args(_,_, _, _) ->
- throw({error,{asn1,{wrong_number_of_arguments}}}).
+match_args(S, _, _, _) ->
+ asn1_error(S, param_wrong_number_of_arguments).
%%%%%%%%%%%%%%%%%
%% categorize_arg(S,FormalArg,ActualArg) -> {FormalArg,CatgorizedActualArg}
@@ -3652,11 +3126,6 @@ parameter_name_style(#'Externaltypereference'{}) ->
parameter_name_style(#'Externalvaluereference'{}) ->
beginning_lowercase.
-is_lowercase(X) when X >= $A,X =< $W ->
- false;
-is_lowercase(_) ->
- true.
-
%% categorize(Parameter) -> CategorizedParameter
%% If Parameter has an abstract syntax of another category than
%% Category, transform it to a known syntax.
@@ -3705,725 +3174,503 @@ parse_objectset(Set) ->
Set.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% check_constraints/2
-%%
-check_constraints(S,C) when is_list(C) ->
- check_constraints(S, C, []).
-
-resolv_tuple_or_list(S,List) when is_list(List) ->
- lists:map(fun(X)->resolv_value(S,X) end, List);
-resolv_tuple_or_list(S,{Lb,Ub}) ->
- {resolv_value(S,Lb),resolv_value(S,Ub)}.
-
-%%%-----------------------------------------
-%% If the constraint value is a defined value the valuename
-%% is replaced by the actual value
%%
-resolv_value(S,Val) ->
- Id = match_parameters(S,Val, S#state.parameters),
- resolv_value1(S,Id).
+%% Check and simplify constraints.
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-resolv_value1(S, ERef = #'Externalvaluereference'{value=Name}) ->
- case catch resolve_namednumber(S, S#state.type, Name) of
- V when is_integer(V) ->
- V;
- _ ->
- case get_referenced_type(S,ERef) of
- {Err,_Reason} when Err == error; Err == 'EXIT' ->
- throw({error,{asn1,{undefined_type_or_value,
- Name}}});
- {_M,VDef} ->
- resolv_value1(S,VDef)
- end
- end;
-resolv_value1(S, {gt,V}) ->
- case resolv_value1(S, V) of
- Int when is_integer(Int) ->
- Int + 1;
- Other ->
- throw({error,{asn1,{not_integer_value,Other}}})
- end;
-resolv_value1(S, {lt,V}) ->
- case resolv_value1(S, V) of
- Int when is_integer(Int) ->
- Int - 1;
- Other ->
- throw({error,{asn1,{not_integer_value,Other}}})
- end;
-resolv_value1(S,{'ValueFromObject',{object,Object},[{valuefieldreference,
- FieldName}]}) ->
- %% FieldName can hold either a fixed-type value or a variable-type value
- %% Object is a DefinedObject, i.e. a #'Externaltypereference'
- resolve_value_from_object(S,Object,FieldName);
-resolv_value1(_,#valuedef{checked=true,value=V}) ->
- V;
-resolv_value1(S,#valuedef{type=_T,
- value={'ValueFromObject',{object,Object},
- [{valuefieldreference,
- FieldName}]}}) ->
- resolve_value_from_object(S,Object,FieldName);
-resolv_value1(S,VDef = #valuedef{}) ->
- #valuedef{value=Val} = check_value(S,VDef),
- Val;
-resolv_value1(_,V) ->
- V.
-resolve_value_from_object(S,Object,FieldName) ->
- {_,ObjTDef} = get_referenced_type(S,Object),
- TS = check_object(S,ObjTDef,ObjTDef#typedef.typespec),
- {_,_,Components} = TS#'Object'.def,
- case lists:keysearch(FieldName,1,Components) of
- {value,{_,#valuedef{value=Val}}} ->
- Val;
- _ ->
- error({value,"illegal value in constraint",S})
+check_constraints(_S, _HostType, []) ->
+ [];
+check_constraints(S, HostType0, [_|_]=Cs0) ->
+ HostType = get_real_host_type(HostType0, Cs0),
+ Cs1 = top_level_intersections(Cs0),
+ Cs2 = [coalesce_constraints(C) || C <- Cs1],
+ {_,Cs3} = filter_extensions(Cs2),
+ Cs = simplify_element_sets(S, HostType, Cs3),
+ finish_constraints(Cs).
+
+get_real_host_type(HostType, Cs) ->
+ case lists:keyfind(ocft, 1, Cs) of
+ false -> HostType;
+ {_,OCFT} -> HostType#type{def=OCFT}
end.
+top_level_intersections([{element_set,{intersection,_,_}=C,none}]) ->
+ top_level_intersections_1(C);
+top_level_intersections(Cs) ->
+ Cs.
+
+top_level_intersections_1({intersection,A,B}) ->
+ [{element_set,A,none}|top_level_intersections_1(B)];
+top_level_intersections_1(Other) ->
+ [{element_set,Other,none}].
+
+coalesce_constraints({element_set,
+ {Tag,{element_set,A,_}},
+ {Tag,{element_set,B,_}}}) ->
+    %% (SIZE (C1), ..., SIZE (C2)) => (SIZE (C1, ..., C2))
+ {element_set,{Tag,{element_set,A,B}},none};
+coalesce_constraints(Other) ->
+ Other.
+
+%% Remove all outermost extensions except the last.
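+%%
+%% A hedged sketch with made-up constraints: given the constraint list
+%% [ (1..10, ...), (SIZE (1..4), ...) ], only the last constraint keeps
+%% its extension part, so the result corresponds to
+%% [ (1..10), (SIZE (1..4), ...) ].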
+
+filter_extensions([H0|T0]) ->
+ case filter_extensions(T0) of
+ {true,T} ->
+ H = remove_extension(H0),
+ {true,[H|T]};
+ {false,T} ->
+ {any_extension(H0),[H0|T]}
+ end;
+filter_extensions([]) ->
+ {false,[]}.
-resolve_namednumber(S,#typedef{typespec=Type},Name) ->
- case Type#type.def of
- {'ENUMERATED',NameList} ->
- resolve_namednumber_1(S, Name, NameList, Type);
- {'INTEGER',NameList} ->
- resolve_namednumber_1(S, Name, NameList, Type);
+remove_extension({element_set,Root,_}) ->
+ {element_set,remove_extension(Root),none};
+remove_extension(Tuple) when is_tuple(Tuple) ->
+ L = [remove_extension(El) || El <- tuple_to_list(Tuple)],
+ list_to_tuple(L);
+remove_extension(Other) -> Other.
+
+any_extension({element_set,_,Ext}) when Ext =/= none ->
+ true;
+any_extension(Tuple) when is_tuple(Tuple) ->
+ any_extension_tuple(1, Tuple);
+any_extension(_) -> false.
+
+any_extension_tuple(I, T) when I =< tuple_size(T) ->
+ any_extension(element(I, T)) orelse any_extension_tuple(I+1, T);
+any_extension_tuple(_, _) -> false.
+
+simplify_element_sets(S, HostType, [{element_set,R0,E0}|T0]) ->
+ R1 = simplify_element_set(S, HostType, R0),
+ E1 = simplify_element_set(S, HostType, E0),
+ case simplify_element_sets(S, HostType, T0) of
+ [{element_set,R2,E2}] ->
+ [{element_set,cs_intersection(S, R1, R2),
+ cs_intersection(S, E1, E2)}];
+ L when is_list(L) ->
+ [{element_set,R1,E1}|L]
+ end;
+simplify_element_sets(S, HostType, [H|T]) ->
+ [H|simplify_element_sets(S, HostType, T)];
+simplify_element_sets(_, _, []) ->
+ [].
+
+simplify_element_set(_S, _HostType, empty) ->
+ {set,[]};
+simplify_element_set(S, HostType, {'SingleValue',Vs0}) when is_list(Vs0) ->
+ Vs1 = [resolve_value(S, HostType, V) || V <- Vs0],
+ Vs = make_constr_set_vs(Vs1),
+ simplify_element_set(S, HostType, Vs);
+simplify_element_set(S, HostType, {'SingleValue',V0}) ->
+ V1 = resolve_value(S, HostType, V0),
+ V = {set,[{range,V1,V1}]},
+ simplify_element_set(S, HostType, V);
+simplify_element_set(S, HostType, {'ValueRange',{Lb0,Ub0}}) ->
+ Lb = resolve_value(S, HostType, Lb0),
+ Ub = resolve_value(S, HostType, Ub0),
+ V = make_constr_set(S, Lb, Ub),
+ simplify_element_set(S, HostType, V);
+simplify_element_set(S, HostType, {'ALL-EXCEPT',Set0}) ->
+ Set = simplify_element_set(S, HostType, Set0),
+ {'ALL-EXCEPT',Set};
+simplify_element_set(S, HostType, {intersection,A0,B0}) ->
+ A = simplify_element_set(S, HostType, A0),
+ B = simplify_element_set(S, HostType, B0),
+ cs_intersection(S, A, B);
+simplify_element_set(S, HostType, {union,A0,B0}) ->
+ A = simplify_element_set(S, HostType, A0),
+ B = simplify_element_set(S, HostType, B0),
+ cs_union(S, A, B);
+simplify_element_set(S, HostType, {simpletable,{element_set,Type,_}}) ->
+ check_simpletable(S, HostType, Type);
+simplify_element_set(S, _, {componentrelation,R,Id}) ->
+ check_componentrelation(S, R, Id);
+simplify_element_set(S, HostType, {Tag,{element_set,_,_}=El0}) ->
+ [El1] = simplify_element_sets(S, HostType, [El0]),
+ {Tag,El1};
+simplify_element_set(S, HostType, #type{}=Type) ->
+ simplify_element_set_type(S, HostType, Type);
+simplify_element_set(_, _, C) ->
+ C.
+
+simplify_element_set_type(S, HostType, #type{def=Def0}=Type0) ->
+ #'Externaltypereference'{} = Def0, %Assertion.
+ case get_referenced_type(S, Def0) of
+ {_,#valuedef{checked=false,value={valueset,Vs0}}} ->
+ [Vs1] = simplify_element_sets(S, HostType, [Vs0]),
+ case Vs1 of
+ {element_set,Set,none} ->
+ Set;
+ {element_set,Set,{set,[]}} ->
+ Set
+ end;
+ {_,{valueset,#type{def=#'Externaltypereference'{}}=Type}} ->
+ simplify_element_set_type(S, HostType, Type);
_ ->
- not_enumerated
+ case HostType of
+ #type{def=#'ObjectClassFieldType'{}} ->
+ %% Open type.
+ #type{def=Def} = check_type(S, HostType, Type0),
+ Def;
+ _ ->
+ #type{constraint=Cs} = check_type(S, HostType, Type0),
+ C = convert_back(Cs),
+ simplify_element_set(S, HostType, C)
+ end
end.
-resolve_namednumber_1(S, Name, NameList, Type) ->
- NamedNumberList = check_enumerated(S, NameList, Type#type.constraint),
- {_,N} = lookup_enum_value(S, Name, NamedNumberList),
- N.
-
-check_constraints(S,[{'ContainedSubtype',Type} | Rest], Acc) ->
- {RefMod,CTDef} = get_referenced_type(S,Type#type.def),
- NewS = S#state{module=load_asn1_module(S,RefMod),mname=RefMod,
- type=CTDef,tname=get_datastr_name(CTDef)},
- CType = check_type(NewS,S#state.tname,CTDef#typedef.typespec),
- check_constraints(S,Rest,CType#type.constraint ++ Acc);
-check_constraints(S,[C | Rest], Acc) ->
- check_constraints(S,Rest,[check_constraint(S,C) | Acc]);
-check_constraints(S,[],Acc) ->
- constraint_merge(S,Acc).
-
-
-range_check(F={FixV,FixV}) ->
-% FixV;
- F;
-range_check(VR={Lb,Ub}) when Lb < Ub ->
- VR;
-range_check(Err={_,_}) ->
- throw({error,{asn1,{illegal_size_constraint,Err}}});
-range_check(Value) ->
- Value.
-
-check_constraint(S,Ext) when is_record(Ext,'Externaltypereference') ->
- check_externaltypereference(S,Ext);
-
-
-check_constraint(S,{'SizeConstraint',{Lb,Ub}})
- when is_list(Lb); tuple_size(Lb) =:= 2 ->
- NewLb = range_check(resolv_tuple_or_list(S,Lb)),
- NewUb = range_check(resolv_tuple_or_list(S,Ub)),
- {'SizeConstraint',{NewLb,NewUb}};
-check_constraint(S,{'SizeConstraint',{Lb,Ub}}) ->
- case {resolv_value(S,Lb),resolv_value(S,Ub)} of
- {FixV,FixV} ->
- {'SizeConstraint',FixV};
- {Low,High} when Low < High ->
- {'SizeConstraint',{Low,High}};
- Err ->
- throw({error,{asn1,{illegal_size_constraint,Err}}})
- end;
-check_constraint(S,{'SizeConstraint',Lb}) ->
- {'SizeConstraint',resolv_value(S,Lb)};
+convert_back([H1,H2|T]) ->
+ {intersection,H1,convert_back([H2|T])};
+convert_back([H]) ->
+ H;
+convert_back([]) ->
+ none.
-check_constraint(S,{'SingleValue', L}) when is_list(L) ->
- F = fun(A) -> resolv_value(S,A) end,
- {'SingleValue',lists:sort(lists:map(F,L))};
-
-check_constraint(S,{'SingleValue', V}) when is_integer(V) ->
- Val = resolv_value(S,V),
-%% [{'SingleValue',Val},{'ValueRange',{Val,Val}}]; % Why adding value range?
- {'SingleValue',Val};
-check_constraint(S,{'SingleValue', V}) ->
- {'SingleValue',resolv_value(S,V)};
-
-check_constraint(S,{'ValueRange', {Lb, Ub}}) ->
- {'ValueRange',{resolv_value(S,Lb),resolv_value(S,Ub)}};
-%% In case of a constraint with extension marks like (1..Ub,...)
-check_constraint(S,{VR={'ValueRange', {_Lb, _Ub}},Rest}) ->
- {check_constraint(S,VR),Rest};
-check_constraint(_S,{'PermittedAlphabet',PA}) ->
- {'PermittedAlphabet',permitted_alphabet_cnstr(PA)};
-
-check_constraint(S,{valueset,Type}) ->
- {valueset,check_type(S,S#state.tname,Type)};
-
-check_constraint(_S,ST={simpletable,Type}) when is_atom(Type) ->
- %% An already checked constraint
- ST;
-check_constraint(S,{simpletable,Type}) ->
+check_simpletable(S, HostType, Type) ->
+ case HostType of
+ #type{def=#'ObjectClassFieldType'{}} ->
+ ok;
+ _ ->
+ %% Table constraints may only be applied to
+ %% CLASS.&field constructs.
+ asn1_error(S, illegal_table_constraint)
+ end,
Def = case Type of
#type{def=D} -> D;
- {'SingleValue',ObjRef = #'Externalvaluereference'{}} ->
- ObjRef
+ {'SingleValue',#'Externalvaluereference'{}=ObjRef} ->
+ ObjRef;
+ _ ->
+ asn1_error(S, invalid_table_constraint)
end,
- C = match_parameters(S,Def,S#state.parameters),
+ C = match_parameter(S, Def),
case C of
#'Externaltypereference'{} ->
- ERef = check_externaltypereference(S,C),
- {simpletable,ERef#'Externaltypereference'.type};
- #type{def=#'Externaltypereference'{}=ExtTypeRef} ->
- ERef = check_externaltypereference(S, ExtTypeRef),
+ ERef = check_externaltypereference(S, C),
{simpletable,ERef#'Externaltypereference'.type};
- {valueset,#type{def=ERef=#'Externaltypereference'{}}} -> % this is an object set
- {_,TDef} = get_referenced_type(S,ERef),
- case TDef#typedef.typespec of
- #'ObjectSet'{} ->
- check_object(S,TDef,TDef#typedef.typespec),
- {simpletable,ERef#'Externaltypereference'.type};
- Err ->
- exit({error,{internal_error,Err}})
- end;
#'Externalvaluereference'{} ->
%% This is an object set with a referenced object
- {_,TorVDef} = get_referenced_type(S,C),
- GetObjectSet =
- fun(#typedef{typespec=O}) when is_record(O,'Object') ->
- #'ObjectSet'{class=O#'Object'.classname,
- set={'SingleValue',C}};
- (#valuedef{type=Cl,value=O})
- when is_record(O,'Externalvaluereference'),
- is_record(Cl,type) ->
- %% an object might reference another object
- #'ObjectSet'{class=Cl#type.def,
- set={'SingleValue',O}};
- (Err) ->
- exit({error,{internal_error,simpletable_constraint,Err}})
- end,
- ObjSet = GetObjectSet(TorVDef),
- {simpletable,check_object(S,Type,ObjSet)};
- #'ObjectSet'{} ->
- io:format("ALERT: simpletable forbidden case!~n",[]),
- {simpletable,check_object(S,Type,C)};
- {'ValueFromObject',{_,ORef},FieldName} ->
- %% This is an ObjectFromObject
- {_,Object} = get_referenced_type(S,ORef),
- ChObject = check_object(S,Object,
- Object#typedef.typespec),
- ObjFromObj=
- get_fieldname_element(S,Object#typedef{
- typespec=ChObject},
- FieldName),
- {simpletable,ObjFromObj};
-%% ObjFromObj#typedef{checked=true,typespec=
-%% check_object(S,ObjFromObj,
-%% ObjFromObj#typedef.typespec)}};
- _ ->
- check_type(S,S#state.tname,Type),%% this seems stupid.
- OSName = Def#'Externaltypereference'.type,
- {simpletable,OSName}
- end;
+ {_,TorVDef} = get_referenced_type(S, C),
+ Set = case TorVDef of
+ #typedef{typespec=#'Object'{classname=ClassName}} ->
+ #'ObjectSet'{class=ClassName,
+ set={'SingleValue',C}};
+ #valuedef{type=#type{def=ClassDef},
+ value=#'Externalvaluereference'{}=Obj} ->
+ %% an object might reference another object
+ #'ObjectSet'{class=ClassDef,
+ set={'SingleValue',Obj}}
+ end,
+ {simpletable,check_object(S, Type, Set)};
+ {'ValueFromObject',{_,Object},FieldNames} ->
+ %% This is an ObjectFromObject.
+ {simpletable,extract_field(S, Object, FieldNames)}
+ end.
-check_constraint(S,{componentrelation,{objectset,Opos,Objset},Id}) ->
+check_componentrelation(S, {objectset,Opos,Objset0}, Id) ->
%% Objset is an 'Externaltypereference' record, since Objset is
%% a DefinedObjectSet.
- RealObjset = match_parameters(S,Objset,S#state.parameters),
- ObjSetRef =
- case RealObjset of
- #'Externaltypereference'{} -> RealObjset;
- #type{def=#'Externaltypereference'{}} -> RealObjset#type.def;
- {valueset,OS = #type{def=#'Externaltypereference'{}}} -> OS#type.def
- end,
- Ext = check_externaltypereference(S,ObjSetRef),
- {componentrelation,{objectset,Opos,Ext},Id};
+ ObjSet = match_parameter(S, Objset0),
+ Ext = check_externaltypereference(S, ObjSet),
+ {componentrelation,{objectset,Opos,Ext},Id}.
+
+%%%
+%%% Internal set representation.
+%%%
+%%% We represent sets as a union of strictly disjoint ranges:
+%%%
+%%% {set,[Range]}
+%%%
+%%% A range is represented as:
+%%%
+%%% Range = {a_range,UpperBound} | {range,LowerBound,UpperBound}
+%%%
+%%% We don't use the atom 'MIN' to represent MIN, because atoms
+%%% compare higher than integers. Instead we use {a_range,UpperBound}
+%%% to represent MIN..UpperBound. We represent MAX as 'MAX' because
+%%% 'MAX' compares higher than any integer.
+%%%
+%%% The ranges are sorted in term order. The ranges must not overlap
+%%% or be adjacent to each other. This invariant is established when
+%%% creating sets, and maintained by the intersection and union
+%%% operators.
+%%%
+%%% Examples of invalid set representations:
+%%%
+%%% [{range,0,10},{range,5,10}] %Overlapping ranges
+%%% [{range,0,5},{range,6,10}] %Adjacent ranges
+%%% [{range,10,20},{a_range,100}] %Not sorted
+%%%
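+%%% As an illustrative sketch (the values are chosen here for the
+%%% example), the helpers below build and combine such sets:
+%%%
+%%%   make_constr_set(S, 1, 10)     -> {set,[{range,1,10}]}
+%%%   make_constr_set(S, 'MIN', 10) -> {set,[{a_range,10}]}
+%%%   cs_union(S, {set,[{range,1,10}]}, {set,[{range,20,'MAX'}]}) ->
+%%%     {set,[{range,1,10},{range,20,'MAX'}]}
+%%%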
+
+make_constr_set(_, 'MIN', Ub) ->
+ {set,[{a_range,make_constr_set_val(Ub)}]};
+make_constr_set(_, Lb, Ub) when Lb =< Ub ->
+ {set,[{range,make_constr_set_val(Lb),
+ make_constr_set_val(Ub)}]};
+make_constr_set(S, _, _) ->
+ asn1_error(S, reversed_range).
+
+make_constr_set_val([C]) when is_integer(C) -> C;
+make_constr_set_val(Val) -> Val.
+
+make_constr_set_vs(Vs) ->
+ {set,make_constr_set_vs_1(Vs)}.
+
+make_constr_set_vs_1([]) ->
+ [];
+make_constr_set_vs_1([V]) ->
+ [{range,V,V}];
+make_constr_set_vs_1([V0|Vs]) ->
+ V1 = make_constr_set_vs_1(Vs),
+ range_union([{range,V0,V0}], V1).
+
+%%%
+%%% Set operators.
+%%%
+
+cs_intersection(_S, Other, none) ->
+ Other;
+cs_intersection(_S, none, Other) ->
+ Other;
+cs_intersection(_S, {set,SetA}, {set,SetB}) ->
+ {set,range_intersection(SetA, SetB)};
+cs_intersection(_S, A, B) ->
+ {intersection,A,B}.
+
+range_intersection([], []) ->
+ [];
+range_intersection([_|_], []) ->
+ [];
+range_intersection([], [_|_]) ->
+ [];
+range_intersection([H1|_]=A, [H2|_]=B) when H1 > H2 ->
+ range_intersection(B, A);
+range_intersection([H1|T1], [H2|T2]=B) ->
+ %% Now H1 =< H2.
+ case {H1,H2} of
+ {{a_range,Ub0},{a_range,Ub1}} when Ub0 < Ub1 ->
+ %% Ub0 =/= 'MAX'
+ [H1|range_intersection(T1, [{range,Ub0+1,Ub1}|T2])];
+ {{a_range,_},{a_range,_}} ->
+ %% Must be equal.
+ [H1|range_intersection(T1, T2)];
+ {{a_range,Ub0},{range,Lb1,_Ub1}} when Ub0 < Lb1 ->
+ %% No intersection.
+ range_intersection(T1, B);
+ {{a_range,Ub0},{range,Lb1,Ub1}} when Ub0 < Ub1 ->
+ %% Ub0 =/= 'MAX'
+ [{range,Lb1,Ub0}|range_intersection(T1, [{range,Ub0+1,Ub1}|T2])];
+ {{a_range,Ub},{range,_Lb1,Ub}} ->
+ %% The first range covers the second range, but does not
+ %% go beyond. We handle this case specially because Ub may
+ %% be 'MAX', and evaluating 'MAX'+1 will fail.
+ [H2|range_intersection(T1, T2)];
+ {{a_range,Ub0},{range,_Lb1,Ub1}} ->
+ %% Ub0 > Ub1, Ub1 =/= 'MAX'. The first range completely
+ %% covers and extends beyond the second range.
+ [H2|range_intersection([{range,Ub1+1,Ub0}|T1], T2)];
+ {{range,_Lb0,Ub0},{range,Lb1,_Ub1}} when Ub0 < Lb1 ->
+ %% Lb0 < Lb1. No intersection.
+ range_intersection(T1, B);
+ {{range,_Lb0,Ub0},{range,Lb1,Ub1}} when Ub0 < Ub1 ->
+ %% Ub0 >= Lb1, Ub0 =/= 'MAX'. Partial overlap.
+ [{range,Lb1,Ub0}|range_intersection(T1, [{range,Ub0+1,Ub1}|T2])];
+ {{range,_Lb0,Ub},{range,_Lb1,Ub}} ->
+ %% The first range covers the second range, but does not
+ %% go beyond. We handle this case specially because Ub may
+ %% be 'MAX', and evaluating 'MAX'+1 will fail.
+ [H2|range_intersection(T1, T2)];
+ {{range,_Lb0,Ub0},{range,_Lb1,Ub1}} ->
+ %% Ub1 =/= MAX. The first range completely covers and
+ %% extends beyond the second.
+ [H2|range_intersection([{range,Ub1+1,Ub0}|T1], T2)]
+ end.
-check_constraint(S,Type) when is_record(Type,type) ->
- #type{def=Def} = check_type(S,S#state.tname,Type),
- Def;
+cs_union(_S, {set,SetA}, {set,SetB}) ->
+ {set,range_union(SetA, SetB)};
+cs_union(_S, A, B) ->
+ {union,A,B}.
+
+range_union(A, B) ->
+ range_union_1(lists:merge(A, B)).
+
+range_union_1([{a_range,Ub0},{a_range,Ub1}|T]) ->
+ range_union_1([{a_range,max(Ub0, Ub1)}|T]);
+range_union_1([{a_range,Ub0},{range,Lb1,Ub1}|T]) when Lb1-1 =< Ub0 ->
+ range_union_1([{a_range,max(Ub0, Ub1)}|T]);
+range_union_1([{a_range,_}=H|T]) ->
+ %% Ranges are disjoint.
+ [H|range_union_1(T)];
+range_union_1([{range,Lb0,Ub0},{range,Lb1,Ub1}|T]) when Lb1-1 =< Ub0 ->
+ range_union_1([{range,Lb0,max(Ub0, Ub1)}|T]);
+range_union_1([{range,_,_}=H|T]) ->
+ %% Ranges are disjoint.
+ [H|range_union_1(T)];
+range_union_1([]) ->
+ [].
-check_constraint(S,C) when is_list(C) ->
- lists:map(fun(X)->check_constraint(S,X) end,C);
-% else keep the constraint unchanged
-check_constraint(_S,Any) ->
-% io:format("Constraint = ~p~n",[Any]),
- Any.
-
-permitted_alphabet_cnstr(T) when is_tuple(T) ->
- permitted_alphabet_cnstr([T]);
-permitted_alphabet_cnstr(L) when is_list(L) ->
- VRexpand = fun({'ValueRange',{A,B}}) ->
- {'SingleValue',expand_valuerange(A,B)};
- (Other) ->
- Other
- end,
- L2 = lists:map(VRexpand,L),
- %% first perform intersection
- L3 = permitted_alphabet_intersection(L2),
- [Res] = permitted_alphabet_union(L3),
- Res.
+%%%
+%%% Finish up constraints, making them suitable for the back-ends.
+%%%
+%%% A 'PermittedAlphabet' (FROM) constraint will be reduced to:
+%%%
+%%% {'SingleValue',[integer()]}
+%%%
+%%% A 'SizeConstraint' (SIZE) constraint will be reduced to:
+%%%
+%%% {Lb,Ub}
+%%%
+%%% All other constraints will be reduced to:
+%%%
+%%% {'SingleValue',[integer()]} | {'ValueRange',Lb,Ub}
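+%%%
+%%% For example, SIZE (1..4) ends up as {'SizeConstraint',{1,4}},
+%%% and a plain (1..10) as {'ValueRange',{1,10}} (or
+%%% {{'ValueRange',{1,10}},[]} when the constraint is extensible).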
+%%%
+
+finish_constraints(Cs) ->
+ finish_constraints_1(Cs, fun smart_collapse/1).
+
+finish_constraints_1([{element_set,{Tag,{element_set,_,_}=Set0},none}|T],
+ Collapse0) ->
+ Collapse = collapse_fun(Tag),
+ case finish_constraints_1([Set0], Collapse) of
+ [] ->
+ finish_constraints_1(T, Collapse0);
+ [Set] ->
+ [{Tag,Set}|finish_constraints_1(T, Collapse0)]
+ end;
+finish_constraints_1([{element_set,{set,[{a_range,'MAX'}]},_}|T], Collapse) ->
+ finish_constraints_1(T, Collapse);
+finish_constraints_1([{element_set,{intersection,A0,B0},none}|T], Collapse) ->
+ A = {element_set,A0,none},
+ B = {element_set,B0,none},
+ finish_constraints_1([A,B|T], Collapse);
+finish_constraints_1([{element_set,Root,Ext}|T], Collapse) ->
+ case finish_constraint(Root, Ext, Collapse) of
+ none ->
+ finish_constraints_1(T, Collapse);
+ Constr ->
+ [Constr|finish_constraints_1(T, Collapse)]
+ end;
+finish_constraints_1([H|T], Collapse) ->
+ [H|finish_constraints_1(T, Collapse)];
+finish_constraints_1([], _) ->
+ [].
-expand_valuerange([A],[A]) ->
- [A];
-expand_valuerange([A],[B]) when A < B ->
- [A|expand_valuerange([A+1],[B])].
+finish_constraint({set,Root0}, Ext, Collapse) ->
+ case Collapse(Root0) of
+ none -> none;
+ Root -> finish_constraint(Root, Ext, Collapse)
+ end;
+finish_constraint(Root, Ext, _Collapse) ->
+ case Ext of
+ none -> Root;
+ _ -> {Root,[]}
+ end.
-permitted_alphabet_intersection(C) ->
- permitted_alphabet_merge(C,intersection, []).
+collapse_fun('SizeConstraint') ->
+ fun size_constraint_collapse/1;
+collapse_fun('PermittedAlphabet') ->
+ fun single_value_collapse/1.
-permitted_alphabet_union(C) ->
- permitted_alphabet_merge(C,union, []).
+single_value_collapse(V) ->
+ {'SingleValue',ordsets:from_list(single_value_collapse_1(V))}.
-permitted_alphabet_merge([],_,Acc) ->
- lists:reverse(Acc);
-permitted_alphabet_merge([{'SingleValue',L1},
- UorI,
- {'SingleValue',L2}|Rest],UorI,Acc)
- when is_list(L1),is_list(L2) ->
- UI = ordsets:UorI([ordsets:from_list(L1),ordsets:from_list(L2)]),
- permitted_alphabet_merge([{'SingleValue',UI}|Rest],UorI,Acc);
-permitted_alphabet_merge([C1|Rest],UorI,Acc) ->
- permitted_alphabet_merge(Rest,UorI,[C1|Acc]).
-
-
-%% constraint_merge/2
-%% Compute the intersection of the outermost level of the constraint list.
-%% See Dubuisson second paragraph and fotnote on page 285.
-%% If constraints with extension are included in combined constraints. The
-%% resulting combination will have the extension of the last constraint. Thus,
-%% there will be no extension if the last constraint is without extension.
-%% The rootset of all constraints are considered in the "outermoust
-%% intersection". See section 13.1.2 in Dubuisson.
-constraint_merge(St, Cs0) ->
- Cs = constraint_merge_1(St, Cs0),
- normalize_cs(Cs).
-
-normalize_cs([{'SingleValue',[V]}|Cs]) ->
- [{'SingleValue',V}|normalize_cs(Cs)];
-normalize_cs([{'SingleValue',[_|_]=L0}|Cs]) ->
- [H|T] = L = lists:usort(L0),
- [case is_range(H, T) of
- false -> {'SingleValue',L};
- true -> {'ValueRange',{H,lists:last(T)}}
- end|normalize_cs(Cs)];
-normalize_cs([{'ValueRange',{Sv,Sv}}|Cs]) ->
- [{'SingleValue',Sv}|normalize_cs(Cs)];
-normalize_cs([{'ValueRange',{'MIN','MAX'}}|Cs]) ->
- normalize_cs(Cs);
-normalize_cs([{'SizeConstraint',C0}|Cs]) ->
- case normalize_size_constraint(C0) of
- none ->
- normalize_cs(Cs);
- C ->
- [{'SizeConstraint',C}|normalize_cs(Cs)]
- end;
-normalize_cs([H|T]) ->
- [H|normalize_cs(T)];
-normalize_cs([]) -> [].
+single_value_collapse_1([{range,Lb,Ub}|T]) when is_integer(Lb),
+ is_integer(Ub) ->
+ lists:seq(Lb, Ub) ++ single_value_collapse_1(T);
+single_value_collapse_1([]) ->
+ [].
-%% Normalize a size constraint to make it non-ambiguous and
-%% easy to interpret for the backends.
-%%
-%% Returns one of the following terms:
-%% {LowerBound,UpperBound}
-%% {{LowerBound,UpperBound},[]} % Extensible
-%% none % Remove size constraint from list
-%%
-%% where:
-%% LowerBound = integer()
-%% UpperBound = integer() | 'MAX'
-
-normalize_size_constraint(Sv) when is_integer(Sv) ->
- {Sv,Sv};
-normalize_size_constraint({Root,Ext}) when is_list(Ext) ->
- {normalize_size_constraint(Root),[]};
-normalize_size_constraint({{_,_},Ext}) when is_integer(Ext) ->
- normalize_size_constraint(Ext);
-normalize_size_constraint([H|T]) ->
- {H,lists:last(T)};
-normalize_size_constraint({0,'MAX'}) ->
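+%% smart_collapse/1 reduces a normalized range set to either a
+%% 'ValueRange' or a 'SingleValue' constraint. For example,
+%% [{range,1,10}] becomes {'ValueRange',{1,10}}, while the disjoint
+%% set [{range,1,2},{range,5,5}] becomes {'SingleValue',[1,2,5]}.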
+smart_collapse([{a_range,Ub}]) ->
+ {'ValueRange',{'MIN',Ub}};
+smart_collapse([{a_range,_}|T]) ->
+ {range,_,Ub} = lists:last(T),
+ {'ValueRange',{'MIN',Ub}};
+smart_collapse([{range,Lb,Ub}]) ->
+ {'ValueRange',{Lb,Ub}};
+smart_collapse([_|_]=L) ->
+ V = lists:foldr(fun({range,Lb,Ub}, A) ->
+ seq(Lb, Ub) ++ A
+ end, [], L),
+ {'SingleValue',V}.
+
+size_constraint_collapse([{range,0,'MAX'}]) ->
none;
-normalize_size_constraint({Lb,Ub}=Range)
- when is_integer(Lb), is_integer(Ub) orelse Ub =:= 'MAX' ->
- Range.
+size_constraint_collapse(Root) ->
+ [{range,Lb,_}|_] = Root,
+ {range,_,Ub} = lists:last(Root),
+ {Lb,Ub}.
-is_range(Prev, [H|T]) when Prev =:= H - 1 -> is_range(H, T);
-is_range(_, [_|_]) -> false;
-is_range(_, []) -> true.
+seq(Same, Same) ->
+ [Same];
+seq(Lb, Ub) when is_integer(Lb), is_integer(Ub) ->
+ lists:seq(Lb, Ub).
-constraint_merge_1(_S, [H]=C) when is_tuple(H) ->
- C;
-constraint_merge_1(_S, []) ->
- [];
-constraint_merge_1(S, C) ->
- %% skip all extension but the last extension
- C1 = filter_extensions(C),
- %% perform all internal level intersections, intersections first
- %% since they have precedence over unions
- C2 = lists:map(fun(X)when is_list(X)->constraint_intersection(S,X);
- (X) -> X end,
- C1),
- %% perform all internal level unions
- C3 = lists:map(fun(X)when is_list(X)->constraint_union(S,X);
- (X) -> X end,
- C2),
-
- %% now get intersection of the outermost level
- %% get the least common single value constraint
- SVs = get_constraints(C3,'SingleValue'),
- CombSV = intersection_of_sv(S,SVs),
- %% get the least common value range constraint
- VRs = get_constraints(C3,'ValueRange'),
- CombVR = intersection_of_vr(S,VRs),
- %% get the least common size constraint
- SZs = get_constraints(C3,'SizeConstraint'),
- CombSZ = intersection_of_size(S,SZs),
- RestC = ordsets:subtract(ordsets:from_list(C3),
- ordsets:from_list(SZs ++ VRs ++ SVs)),
- %% get the least common combined constraint. That is the union of each
- %% deep constraint and merge of single value and value range constraints.
- %% FIXME: Removing 'intersection' from the flattened list essentially
- %% means that intersections are converted to unions!
- Cs = combine_constraints(S, CombSV, CombVR, CombSZ++RestC),
- [X || X <- lists:flatten(Cs),
- X =/= intersection,
- X =/= union].
-
-%% constraint_union(S,C) takes a list of constraints as input and
-%% merge them to a union. Unions are performed when two
-%% constraints is found with an atom union between.
-%% The list may be nested. Fix that later !!!
-constraint_union(_S,[]) ->
- [];
-constraint_union(_S,C=[_E]) ->
- C;
-constraint_union(S,C) when is_list(C) ->
- case lists:member(union,C) of
- true ->
- constraint_union1(S,C,[]);
- _ ->
- C
- end;
-% SV = get_constraints(C,'SingleValue'),
-% SV1 = constraint_union_sv(S,SV),
-% VR = get_constraints(C,'ValueRange'),
-% VR1 = constraint_union_vr(VR),
-% RestC = ordsets:filter(fun({'SingleValue',_})->false;
-% ({'ValueRange',_})->false;
-% (_) -> true end,ordsets:from_list(C)),
-% SV1++VR1++RestC;
-constraint_union(_S,C) ->
- [C].
-
-constraint_union1(S, [{'ValueRange',{Lb1,Ub1}},union,
- {'ValueRange',{Lb2,Ub2}}|Rest], Acc) ->
- AunionB = {'ValueRange',{c_min(Lb1, Lb2),max(Ub1, Ub2)}},
- constraint_union1(S, [AunionB|Rest], Acc);
-constraint_union1(S,[A={'SingleValue',_},union,B={'SingleValue',_}|Rest],Acc) ->
- AunionB = constraint_union_sv(S,[A,B]),
- constraint_union1(S,Rest,Acc ++ AunionB);
-constraint_union1(S,[A={'SingleValue',_},union,B={'ValueRange',_}|Rest],Acc) ->
- AunionB = union_sv_vr(S,A,B),
- constraint_union1(S, AunionB++Rest, Acc);
-constraint_union1(S,[A={'ValueRange',_},union,B={'SingleValue',_}|Rest],Acc) ->
- AunionB = union_sv_vr(S,B,A),
- constraint_union1(S, AunionB++Rest, Acc);
-constraint_union1(S,[union|Rest],Acc) -> %skip when unsupported constraints
- constraint_union1(S,Rest,Acc);
-constraint_union1(S,[A|Rest],Acc) ->
- constraint_union1(S,Rest,[A|Acc]);
-constraint_union1(_S,[],Acc) ->
- Acc.
+%%%-----------------------------------------
+%% If the constraint value is a defined value, the value name
+%% is replaced by the actual value.
+%%
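+%% For example, given ub INTEGER ::= 10, a reference to ub in
+%% INTEGER (1..ub) resolves to the integer 10. The forms {gt,V} and
+%% {lt,V} (typically produced by exclusive bounds such as lb<..ub)
+%% resolve to V+1 and V-1, respectively.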
+resolve_value(S, HostType, Val) ->
+ Id = match_parameter(S, Val),
+ resolve_value1(S, HostType, Id).
-constraint_union_sv(_S,SV) ->
- Values=lists:map(fun({_,V})->V end,SV),
- case ordsets:from_list(Values) of
- [] -> [];
- [N] -> [{'SingleValue',N}];
- L -> [{'SingleValue',L}]
- end.
-c_min('MIN', _) -> 'MIN';
-c_min(_, 'MIN') -> 'MIN';
-c_min(A, B) -> min(A, B).
-
-union_sv_vr(_S,{'SingleValue',SV},VR)
- when is_integer(SV) ->
- union_sv_vr(_S,{'SingleValue',[SV]},VR);
-union_sv_vr(_S,{'SingleValue',SV},{'ValueRange',{VLb,VUb}})
- when is_list(SV) ->
- L = lists:sort(SV++[VLb,VUb]),
- {Lb,L1} = case lists:member('MIN',L) of
- true -> {'MIN',L--['MIN']}; % remove 'MIN' so it does not disturb
- false -> {hd(L),tl(L)}
- end,
- Ub = case lists:member('MAX',L1) of
- true -> 'MAX';
- false -> lists:last(L1)
- end,
- case SV of
- [H] -> H;
- _ -> SV
- end,
- %% for now we through away the Singlevalues so that they don't disturb
- %% in the code generating phase (the effective Valuerange is already
- %% calculated. If we want to keep the Singlevalues as well for
- %% use in code gen phases we need to introduce a new representation
- %% like {'ValueRange',{Lb,Ub},[ListOfRanges|AntiValues|Singlevalues]
- %% These could be used to generate guards which allows only the specific
- %% values , not the full range
- [{'ValueRange',{Lb,Ub}}].
-
-
-%% get_constraints/2
-%% Arguments are a list of constraints, which has the format {key,value},
-%% and a constraint type
-%% Returns a list of constraints only of the requested type or the atom
-%% 'no' if no such constraints were found
-get_constraints(L=[{CType,_}],CType) ->
- L;
-get_constraints(C,CType) ->
- keysearch_allwithkey(CType,1,C).
-
-%% keysearch_allwithkey(Key,Ix,L)
-%% Types:
-%% Key = is_atom()
-%% Ix = integer()
-%% L = [TwoTuple]
-%% TwoTuple = [{atom(),term()}|...]
-%% Returns a List that contains all
-%% elements from L that has a key Key as element Ix
-keysearch_allwithkey(Key,Ix,L) ->
- lists:filter(fun(X) when is_tuple(X) ->
- case element(Ix,X) of
- Key -> true;
- _ -> false
- end;
- (_) -> false
- end, L).
-
-
-%% filter_extensions(C)
-%% takes a list of constraints as input and returns a list with the
-%% constraints and all extensions but the last are removed.
-filter_extensions([L]) when is_list(L) ->
- [filter_extensions(L)];
-filter_extensions(C=[_H]) ->
- C;
-filter_extensions(C) when is_list(C) ->
- filter_extensions(C,[], []).
-
-filter_extensions([],Acc,[]) ->
- Acc;
-filter_extensions([],Acc,[EC|ExtAcc]) ->
- CwoExt = remove_extension(ExtAcc,[]),
- CwoExt ++ [EC|Acc];
-filter_extensions([C={A,_E}|T],Acc,ExtAcc) when is_tuple(A) ->
- filter_extensions(T,Acc,[C|ExtAcc]);
-filter_extensions([C={'SizeConstraint',{A,_B}}|T],Acc,ExtAcc)
- when is_list(A);is_tuple(A) ->
- filter_extensions(T,Acc,[C|ExtAcc]);
-filter_extensions([C={'PermittedAlphabet',{{'SingleValue',_},E}}|T],Acc,ExtAcc)
- when is_tuple(E); is_list(E) ->
- filter_extensions(T,Acc,[C|ExtAcc]);
-filter_extensions([H|T],Acc,ExtAcc) ->
- filter_extensions(T,[H|Acc],ExtAcc).
-
-remove_extension([],Acc) ->
- Acc;
-remove_extension([{'SizeConstraint',{A,_B}}|R],Acc) ->
- remove_extension(R,[{'SizeConstraint',A}|Acc]);
-remove_extension([{C,_E}|R],Acc) when is_tuple(C) ->
- remove_extension(R,[C|Acc]);
-remove_extension([{'PermittedAlphabet',{A={'SingleValue',_},
- E}}|R],Acc)
- when is_tuple(E);is_list(E) ->
- remove_extension(R,[{'PermittedAlphabet',A}|Acc]).
-
-%% constraint_intersection(S,C) takes a list of constraints as input and
-%% performs intersections. Intersecions are performed when an
-%% atom intersection is found between two constraints.
-%% The list may be nested. Fix that later !!!
-constraint_intersection(_S,[]) ->
- [];
-constraint_intersection(_S,C=[_E]) ->
- C;
-constraint_intersection(S,C) when is_list(C) ->
-% io:format("constraint_intersection: ~p~n",[C]),
- case lists:member(intersection,C) of
- true ->
- constraint_intersection1(S,C,[]);
- _ ->
- C
+resolve_value1(S, HostType, #'Externalvaluereference'{value=Name}=ERef) ->
+ case resolve_namednumber(S, HostType, Name) of
+ V when is_integer(V) ->
+ V;
+ not_named ->
+ resolve_value1(S, HostType, get_referenced_value(S, ERef))
end;
-constraint_intersection(_S,C) ->
- [C].
-
-constraint_intersection1(S,[A,intersection,B|Rest],Acc) ->
- AisecB = c_intersect(S,A,B),
- constraint_intersection1(S, AisecB++Rest, Acc);
-constraint_intersection1(S,[A|Rest],Acc) ->
- constraint_intersection1(S,Rest,[A|Acc]);
-constraint_intersection1(_, [], [C]) ->
- C;
-constraint_intersection1(_,[],Acc) ->
- lists:reverse(Acc).
-
-c_intersect(S,C1={'SingleValue',_},C2={'SingleValue',_}) ->
- intersection_of_sv(S,[C1,C2]);
-c_intersect(S,C1={'ValueRange',_},C2={'ValueRange',_}) ->
- intersection_of_vr(S,[C1,C2]);
-c_intersect(S,C1={'ValueRange',_},C2={'SingleValue',_}) ->
- intersection_sv_vr(S,[C2],[C1]);
-c_intersect(S,C1={'SingleValue',_},C2={'ValueRange',_}) ->
- intersection_sv_vr(S,[C1],[C2]);
-c_intersect(_S,C1,C2) ->
- [C1,C2].
-
-%% combine_constraints(S,SV,VR,CComb)
-%% Types:
-%% S = is_record(state,S)
-%% SV = [] | [SVC]
-%% VR = [] | [VRC]
-%% CComb = [] | [Lists]
-%% SVC = {'SingleValue',integer()} | {'SingleValue',[integer(),...]}
-%% VRC = {'ValueRange',{Lb,Ub}}
-%% Lists = List of lists containing any constraint combination
-%% Lb = 'MIN' | integer()
-%% Ub = 'MAX' | integer()
-%% Returns a combination of the least common constraint among SV,VR and all
-%% elements in CComb
-combine_constraints(_S,[],VR,CComb) ->
- VR ++ CComb;
-% combine_combined_cnstr(S,VR,CComb);
-combine_constraints(_S,SV,[],CComb) ->
- SV ++ CComb;
-% combine_combined_cnstr(S,SV,CComb);
-combine_constraints(S,SV,VR,CComb) ->
- C=intersection_sv_vr(S,SV,VR),
- C ++ CComb.
-% combine_combined_cnstr(S,C,CComb).
-
-intersection_sv_vr(_S,[C1={'SingleValue',SV}],[C2={'ValueRange',{_Lb,_Ub}}])
- when is_integer(SV) ->
- case is_int_in_vr(SV,C2) of
- true -> [C1];
- _ -> %%error({type,{"asn1 illegal constraint",C1,C2},S})
- %throw({error,{"asn1 illegal constraint",C1,C2}})
- %io:format("warning: could not analyze constraint ~p~n",[[C1,C2]]),
- [C1,C2]
+resolve_value1(S, HostType, {gt,V}) ->
+ case resolve_value1(S, HostType, V) of
+ Int when is_integer(Int) ->
+ Int + 1;
+ _Other ->
+ asn1_error(S, illegal_integer_value)
end;
-intersection_sv_vr(_S,[C1={'SingleValue',SV}],[C2])
- when is_list(SV) ->
- case lists:filter(fun(X)->is_int_in_vr(X,C2) end,SV) of
- [] ->
- %%error({type,{"asn1 illegal constraint",C1,C2},S});
- %throw({error,{"asn1 illegal constraint",C1,C2}});
- %io:format("warning: could not analyze constraint ~p~n",[[C1,C2]]),
- [C1,C2];
- [V] -> [{'SingleValue',V}];
- L -> [{'SingleValue',L}]
- end.
-
-
-%% Size constraint [{'SizeConstraint',1},{'SizeConstraint',{{1,64},[]}}]
-
-intersection_of_size(_,[]) ->
- [];
-intersection_of_size(_,C=[_SZ]) ->
- C;
-intersection_of_size(S,[SZ,SZ|Rest]) ->
- intersection_of_size(S,[SZ|Rest]);
-intersection_of_size(S,C=[C1={_,Int},{_,Range}|Rest])
- when is_integer(Int),is_tuple(Range) ->
- case Range of
- {Lb,Ub} when Int >= Lb,
- Int =< Ub ->
- intersection_of_size(S,[C1|Rest]);
- {{Lb,Ub},Ext} when is_list(Ext),Int >= Lb,Int =< Ub ->
- intersection_of_size(S,[C1|Rest]);
- _ ->
- throw({error,{asn1,{illegal_size_constraint,C}}})
+resolve_value1(S, HostType, {lt,V}) ->
+ case resolve_value1(S, HostType, V) of
+ Int when is_integer(Int) ->
+ Int - 1;
+ _Other ->
+ asn1_error(S, illegal_integer_value)
end;
-intersection_of_size(S,[C1={_,Range},C2={_,Int}|Rest])
- when is_integer(Int),is_tuple(Range) ->
- intersection_of_size(S,[C2,C1|Rest]);
-intersection_of_size(S,[{_,{Lb1,Ub1}},{_,{Lb2,Ub2}}|Rest]) ->
- Lb=greatest_LB(ordsets:from_list([Lb1,Lb2])),
- Ub=smallest_UB(ordsets:from_list([Ub1,Ub2])),
- intersection_of_size(S,[{'SizeConstraint',{Lb,Ub}}|Rest]);
-intersection_of_size(_,SZ) ->
- throw({error,{asn1,{illegal_size_constraint,SZ}}}).
-
-intersection_of_vr(_,[]) ->
- [];
-intersection_of_vr(_,VR=[_C]) ->
- VR;
-intersection_of_vr(S,[{_,{Lb1,Ub1}},{_,{Lb2,Ub2}}|Rest]) ->
- Lb=greatest_LB(ordsets:from_list([Lb1,Lb2])),
- Ub=smallest_UB(ordsets:from_list([Ub1,Ub2])),
- intersection_of_vr(S,[{'ValueRange',{Lb,Ub}}|Rest]);
-intersection_of_vr(_S,VR) ->
- %%error({type,{asn1,{illegal_value_range_constraint,VR}},S});
- throw({error,{asn1,{illegal_value_range_constraint,VR}}}).
-
-intersection_of_sv(_,[]) ->
- [];
-intersection_of_sv(_,SV=[_C]) ->
- SV;
-intersection_of_sv(S,[SV,SV|Rest]) ->
- intersection_of_sv(S,[SV|Rest]);
-intersection_of_sv(S,[{_,Int},{_,SV}|Rest]) when is_integer(Int),
- is_list(SV) ->
- SV2=intersection_of_sv1(S,Int,SV),
- intersection_of_sv(S,[SV2|Rest]);
-intersection_of_sv(S,[{_,SV},{_,Int}|Rest]) when is_integer(Int),
- is_list(SV) ->
- SV2=intersection_of_sv1(S,Int,SV),
- intersection_of_sv(S,[SV2|Rest]);
-intersection_of_sv(S,[{_,SV1},{_,SV2}|Rest]) when is_list(SV1),
- is_list(SV2) ->
- SV3=common_set(SV1,SV2),
- intersection_of_sv(S,[SV3|Rest]);
-intersection_of_sv(_S,SV) ->
- %%error({type,{asn1,{illegal_single_value_constraint,SV}},S}).
- throw({error,{asn1,{illegal_single_value_constraint,SV}}}).
-
-intersection_of_sv1(_S,Int,SV) when is_integer(Int),is_list(SV) ->
- case lists:member(Int,SV) of
- true -> {'SingleValue',Int};
+resolve_value1(S, _HostType, {'ValueFromObject',{object,Object},FieldName}) ->
+ get_value_from_object(S, Object, FieldName);
+resolve_value1(_, _, #valuedef{checked=true,value=V}) ->
+ V;
+resolve_value1(S, _, #valuedef{value={'ValueFromObject',
+ {object,Object},FieldName}}) ->
+ get_value_from_object(S, Object, FieldName);
+resolve_value1(S, _HostType, #valuedef{}=VDef) ->
+ #valuedef{value=Val} = check_value(S,VDef),
+ Val;
+resolve_value1(_, _, V) ->
+ V.
+
+resolve_namednumber(S, #type{def=Def}, Name) ->
+ case Def of
+ {'ENUMERATED',NameList} ->
+ resolve_namednumber_1(S, Name, NameList);
+ {'INTEGER',NameList} ->
+ resolve_namednumber_1(S, Name, NameList);
_ ->
- %%error({type,{asn1,{illegal_single_value_constraint,Int,SV}},S})
- throw({error,{asn1,{illegal_single_value_constraint,Int,SV}}})
- end;
-intersection_of_sv1(_S,SV1,SV2) ->
- %%error({type,{asn1,{illegal_single_value_constraint,SV1,SV2}},S}).
- throw({error,{asn1,{illegal_single_value_constraint,SV1,SV2}}}).
+ not_named
+ end.
-greatest_LB([H]) ->
- H;
-greatest_LB(L) ->
- greatest_LB1(lists:reverse(L)).
-greatest_LB1(['MIN',H2|_T])->
- H2;
-greatest_LB1([H|_T]) ->
- H.
-smallest_UB(L) ->
- hd(L).
-
-common_set(SV1,SV2) ->
- lists:filter(fun(X)->lists:member(X,SV1) end,SV2).
-
-is_int_in_vr(Int,{_,{'MIN','MAX'}}) when is_integer(Int) ->
- true;
-is_int_in_vr(Int,{_,{'MIN',Ub}}) when is_integer(Int),Int =< Ub ->
- true;
-is_int_in_vr(Int,{_,{Lb,'MAX'}}) when is_integer(Int),Int >= Lb ->
- true;
-is_int_in_vr(Int,{_,{Lb,Ub}}) when is_integer(Int),Int >= Lb,Int =< Ub ->
- true;
-is_int_in_vr(_,_) ->
- false.
-
+resolve_namednumber_1(S, Name, NameList) ->
+ try
+ NamedNumberList = check_enumerated(S, NameList),
+ {_,N} = lookup_enum_value(S, Name, NamedNumberList),
+ N
+ catch _:_ ->
+ not_named
+ end.
+
+%%%
+%%% End of constraint handling.
+%%%
check_imported(S,Imodule,Name) ->
check_imported(S,Imodule,Name,false).
@@ -4510,18 +3757,28 @@ check_reference(S,#'Externaltypereference'{pos=Pos,module=Emod,type=Name}) ->
#'Externaltypereference'{pos=Pos,module=ModName,type=Name}
end.
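+%% Follow a (possibly chained) value reference until an actual value
+%% is found, updating the state when crossing a module boundary.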
+get_referenced_value(S, T) ->
+ case get_referenced_type(S, T) of
+ {ExtMod,#valuedef{value=#'Externalvaluereference'{}=Ref}} ->
+ get_referenced_value(update_state(S, ExtMod), Ref);
+ {_,#valuedef{value=Val}} ->
+ Val
+ end.
+
get_referenced_type(S, T) ->
+ get_referenced_type(S, T, false).
+
+get_referenced_type(S, T, Recurse) ->
case do_get_referenced_type(S, T) of
- {_,#type{def=#'Externaltypereference'{}=ERef}} ->
- get_referenced_type(S, ERef);
- {_,#type{def=#'Externalvaluereference'{}=VRef}} ->
- get_referenced_type(S, VRef);
+ {_,#typedef{typespec=#type{def=#'Externaltypereference'{}=ERef}}}
+ when Recurse ->
+ get_referenced_type(S, ERef, Recurse);
{_,_}=Res ->
Res
end.
-do_get_referenced_type(#state{parameters=Ps}=S, T0) ->
- case match_parameters(S, T0, Ps) of
+do_get_referenced_type(S, T0) ->
+ case match_parameter(S, T0) of
T0 ->
do_get_ref_type_1(S, T0);
T ->
@@ -4563,7 +3820,7 @@ get_referenced(S,Emod,Ename,Pos) ->
%% May be an imported entity in module Emod or Emod may not exist
case asn1_db:dbget(Emod,'MODULE') of
undefined ->
- throw({error,{asn1,{module_not_found,Emod}}});
+ asn1_error(S, {undefined_import, Ename, Emod});
_ ->
NewS = update_state(S,Emod),
get_imported(NewS,Ename,Emod,Pos)
@@ -4593,12 +3850,11 @@ get_imported(S,Name,Module,Pos) ->
parse_and_save(S,Imodule),
case asn1_db:dbget(Imodule,'MODULE') of
undefined ->
- throw({error,{asn1,{module_not_found,Imodule}}});
+ asn1_error(S, {undefined_import, Name, Module});
Im when is_record(Im,module) ->
case is_exported(Im,Name) of
false ->
- throw({error,
- {asn1,{not_exported,{Im,Name}}}});
+ asn1_error(S, {undefined_export, Name});
_ ->
?dbg("get_imported, is_exported ~p, ~p~n",[Imodule,Name]),
get_referenced_type(S,
@@ -4611,37 +3867,6 @@ get_imported(S,Name,Module,Pos) ->
get_renamed_reference(S,Name,Module)
end.
-check_and_save(S,#'Externaltypereference'{module=M}=ERef,#typedef{checked=false}=TDef,Settings)
- when S#state.mname /= M ->
- %% This ERef is an imported type (or maybe a set.asn compilation)
- NewS = S#state{mname=M,module=load_asn1_module(S,M),
- type=TDef,tname=get_datastr_name(TDef)},
- Type=check_type(NewS,TDef,TDef#typedef.typespec),%XXX
- CheckedTDef = TDef#typedef{checked=true,
- typespec=Type},
- asn1_db:dbput(M,get_datastr_name(TDef),CheckedTDef),
- {merged_name(S,ERef),Settings};
-check_and_save(S,#'Externaltypereference'{module=M,type=N}=Eref,
- #ptypedef{name=Name,args=Params} = PTDef,Settings) ->
- %% instantiate a parameterized type
- %% The parameterized type should be saved as a type in the module
- %% it was instantiated.
- NewS = S#state{mname=M,module=load_asn1_module(S,M),
- type=PTDef,tname=Name},
- {Args,RestSettings} = lists:split(length(Params),Settings),
- Type = check_type(NewS,PTDef,#type{def={pt,Eref,Args}}),
- ERefName = new_reference_name(N),
- ERefNew = #'Externaltypereference'{type=ERefName,module=S#state.mname},
- NewTDef=#typedef{checked=true,name=ERefName,
- typespec=Type},
- insert_once(S,parameterized_objects,{ERefName,type,NewTDef}),
- asn1_db:dbput(S#state.mname,ERefNew#'Externaltypereference'.type,
- NewTDef),
- {ERefNew,RestSettings};
-check_and_save(_S,ERef,TDef,Settings) ->
- %% This might be a renamed type in a set of specs, so rename the ERef
- {ERef#'Externaltypereference'{type=asn1ct:get_name_of_def(TDef)},Settings}.
-
save_object_set_instance(S,Name,ObjSetSpec)
when is_record(ObjSetSpec,'ObjectSet') ->
NewObjSet = #typedef{checked=true,name=Name,typespec=ObjSetSpec},
@@ -4708,18 +3933,14 @@ update_state(S,ModuleName) ->
S;
_ ->
parse_and_save(S,ModuleName),
- case asn1_db:dbget(ModuleName,'MODULE') of
- RefedMod when is_record(RefedMod,module) ->
- S#state{mname=ModuleName,module=RefedMod};
- _ -> throw({error,{asn1,{module_does_not_exist,ModuleName}}})
- end
+ Mod = #module{} = asn1_db:dbget(ModuleName,'MODULE'),
+ S#state{mname=ModuleName,module=Mod}
end.
-
get_renamed_reference(S,Name,Module) ->
case renamed_reference(S,Name,Module) of
undefined ->
- throw({error,{asn1,{undefined_type,Name}}});
+ asn1_error(S, {undefined, Name});
NewTypeName when NewTypeName =/= Name ->
get_referenced1(S,Module,NewTypeName,undefined)
end.
@@ -4770,37 +3991,49 @@ get_importmoduleoftype([I|Is],Name) ->
get_importmoduleoftype([],_) ->
undefined.
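+%% Replace references to formal parameters of a parameterized type
+%% with the actual parameters of the current instantiation. A name
+%% that does not match any parameter is returned unchanged.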
+match_parameters(S, Names) ->
+ [match_parameter(S, Name) || Name <- Names].
-match_parameters(_S,Name,[]) ->
- Name;
+match_parameter(#state{parameters=Ps}=S, Name) ->
+ match_parameter(S, Name, Ps).
-match_parameters(_S,#'Externaltypereference'{type=Name},[{#'Externaltypereference'{type=Name},NewName}|_T]) ->
+match_parameter(_S, Name, []) ->
+ Name;
+match_parameter(S, {valueset,{element_set,#type{}=Ts,none}}, Ps) ->
+ match_parameter(S, {valueset,Ts}, Ps);
+match_parameter(_S, #'Externaltypereference'{type=Name},
+ [{#'Externaltypereference'{type=Name},NewName}|_T]) ->
NewName;
-match_parameters(_S,#'Externaltypereference'{type=Name},[{{_,#'Externaltypereference'{type=Name}},NewName}|_T]) ->
+match_parameter(_S, #'Externaltypereference'{type=Name},
+ [{{_,#'Externaltypereference'{type=Name}},NewName}|_T]) ->
NewName;
-match_parameters(_S,#'Externalvaluereference'{value=Name},[{#'Externalvaluereference'{value=Name},NewName}|_T]) ->
+match_parameter(_S, #'Externalvaluereference'{value=Name},
+ [{#'Externalvaluereference'{value=Name},NewName}|_T]) ->
NewName;
-match_parameters(_S,#'Externalvaluereference'{value=Name},[{{_,#'Externalvaluereference'{value=Name}},NewName}|_T]) ->
+match_parameter(_S, #'Externalvaluereference'{value=Name},
+ [{{_,#'Externalvaluereference'{value=Name}},NewName}|_T]) ->
NewName;
-match_parameters(_S,#type{def=#'Externaltypereference'{module=M,type=Name}},
- [{#'Externaltypereference'{module=M,type=Name},Type}]) ->
+match_parameter(_S, #type{def=#'Externaltypereference'{module=M,type=Name}},
+ [{#'Externaltypereference'{module=M,type=Name},Type}]) ->
Type;
-match_parameters(_S,{valueset,#type{def=#'Externaltypereference'{type=Name}}},
- [{{_,#'Externaltypereference'{type=Name}},{valueset,#type{def=NewName}}}|_T]) ->
+match_parameter(_S, {valueset,#type{def=#'Externaltypereference'{type=Name}}},
+ [{{_,#'Externaltypereference'{type=Name}},
+ {valueset,#type{def=NewName}}}|_T]) ->
NewName;
-match_parameters(_S,{valueset,#type{def=#'Externaltypereference'{type=Name}}},
- [{{_,#'Externaltypereference'{type=Name}},
- NewName=#type{def=#'Externaltypereference'{}}}|_T]) ->
+match_parameter(_S, {valueset,#type{def=#'Externaltypereference'{type=Name}}},
+ [{{_,#'Externaltypereference'{type=Name}},
+ NewName=#type{def=#'Externaltypereference'{}}}|_T]) ->
NewName#type.def;
-match_parameters(_S,{valueset,#type{def=#'Externaltypereference'{type=Name}}},
- [{{_,#'Externaltypereference'{type=Name}},NewName}|_T]) ->
+match_parameter(_S, {valueset,#type{def=#'Externaltypereference'{type=Name}}},
+ [{{_,#'Externaltypereference'{type=Name}},NewName}|_T]) ->
NewName;
%% When a parameter is a parameterized element it has to be
%% instantiated now!
-match_parameters(S,{valueset,T=#type{def={pt,_,_Args}}},_Parameters) ->
- case catch check_type(S,#typedef{name=S#state.tname,typespec=T},T) of
- pobjectsetdef ->
-
+match_parameter(S, {valueset,T=#type{def={pt,_,_Args}}}, _Ps) ->
+ try check_type(S,#typedef{name=S#state.tname,typespec=T},T) of
+ #type{def=Ts} ->
+ Ts
+ catch pobjectsetdef ->
{_,ObjRef,_Params} = T#type.def,
{_,ObjDef}=get_referenced_type(S,ObjRef),
%%ObjDef is a pvaluesetdef where the type field holds the class
@@ -4818,17 +4051,15 @@ match_parameters(S,{valueset,T=#type{def={pt,_,_Args}}},_Parameters) ->
ObjectSet = #'ObjectSet'{class=RightClassRef,set=T},
ObjSpec = check_object(S,#typedef{typespec=ObjectSet},ObjectSet),
Name = list_to_atom(asn1ct_gen:list2name([get_datastr_name(ObjDef)|S#state.recordtopname])),
- save_object_set_instance(S,Name,ObjSpec);
- pvaluesetdef -> error({pvaluesetdef,"parameterized valueset",S});
- {error,_Reason} -> error({type,"error in parameter",S});
- Ts when is_record(Ts,type) -> Ts#type.def
+ save_object_set_instance(S,Name,ObjSpec)
end;
+
%% same as previous, only depends on order of parsing
-match_parameters(S,{valueset,{pos,{objectset,_,POSref},Args}},Parameters) ->
- match_parameters(S,{valueset,#type{def={pt,POSref,Args}}},Parameters);
-match_parameters(S,Name, [_H|T]) ->
- %%io:format("match_parameters(~p,~p)~n",[Name,[H|T]]),
- match_parameters(S,Name,T).
+match_parameter(S, {valueset,{pos,{objectset,_,POSref},Args}}, Ps) ->
+ match_parameter(S, {valueset,#type{def={pt,POSref,Args}}}, Ps);
+match_parameter(S, Name, [_H|T]) ->
+ %%io:format("match_parameter(~p,~p)~n",[Name,[H|T]]),
+ match_parameter(S, Name, T).
imported(S,Name) ->
{imports,Ilist} = (S#state.module)#module.imports,
@@ -4854,7 +4085,6 @@ check_named_number_list(_S, [{_,_}|_]=NNL) ->
NNL;
check_named_number_list(S, NNL0) ->
%% Check that the names are unique.
- T = S#state.type,
case check_unique(NNL0, 2) of
[] ->
NNL1 = [{Id,resolve_valueref(S, Val)} || {'NamedNumber',Id,Val} <- NNL0],
@@ -4863,14 +4093,14 @@ check_named_number_list(S, NNL0) ->
[] ->
NNL;
[Val|_] ->
- asn1_error(S, T, {value_reused,Val})
+ asn1_error(S, {value_reused,Val})
end;
[H|_] ->
- asn1_error(S, T, {namelist_redefinition,H})
+ asn1_error(S, {namelist_redefinition,H})
end.
-resolve_valueref(S, #'Externalvaluereference'{module=Mod,value=Name}) ->
- dbget_ex(S, Mod, Name);
+resolve_valueref(S, #'Externalvaluereference'{} = T) ->
+ get_referenced_value(S, T);
resolve_valueref(_, Val) when is_integer(Val) ->
Val.
@@ -4879,7 +4109,7 @@ check_integer(S, NNL) ->
check_bitstring(S, NNL0) ->
NNL = check_named_number_list(S, NNL0),
- _ = [asn1_error(S, S#state.type, {invalid_bit_number,Bit}) ||
+ _ = [asn1_error(S, {invalid_bit_number,Bit}) ||
{_,Bit} <- NNL, Bit < 0],
NNL.
@@ -4904,7 +4134,7 @@ check_type_identifier(S, Eref=#'Externaltypereference'{type=Class}) ->
{_,TD=#typedef{typespec=#type{def=#'Externaltypereference'{}}}} ->
check_type_identifier(S, (TD#typedef.typespec)#type.def);
_ ->
- asn1_error(S, S#state.type, {illegal_instance_of,Class})
+ asn1_error(S, {illegal_instance_of,Class})
end.
iof_associated_type(S,[]) ->
@@ -4913,12 +4143,7 @@ iof_associated_type(S,[]) ->
case get(instance_of) of
undefined ->
AssociateSeq = iof_associated_type1(S,[]),
- Tag =
- case S#state.erule of
- ber ->
- [?TAG_CONSTRUCTED(?N_INSTANCE_OF)];
- _ -> []
- end,
+ Tag = [?TAG_CONSTRUCTED(?N_INSTANCE_OF)],
TypeDef=#typedef{checked=true,
name='INSTANCE OF',
typespec=#type{tag=Tag,
@@ -4944,16 +4169,11 @@ iof_associated_type1(S,C) ->
[] -> 'ASN1_OPEN_TYPE';
_ -> {typefield,'Type'}
end,
- {ObjIdTag,C1TypeTag}=
- case S#state.erule of
- ber ->
- {[{'UNIVERSAL',8}],
- [#tag{class='UNIVERSAL',
- number=6,
- type='IMPLICIT',
- form=0}]};
- _ -> {[{'UNIVERSAL','INTEGER'}],[]}
- end,
+ ObjIdTag = [{'UNIVERSAL',8}],
+ C1TypeTag = [#tag{class='UNIVERSAL',
+ number=6,
+ type='IMPLICIT',
+ form=0}],
TypeIdentifierRef=#'Externaltypereference'{module=ModuleName,
type='TYPE-IDENTIFIER'},
ObjectIdentifier =
@@ -4992,9 +4212,13 @@ iof_associated_type1(S,C) ->
%% returns the leading attribute, the constraint of the components and
%% the tablecinf value for the second component.
-instance_of_constraints(_,[]) ->
+instance_of_constraints(_, []) ->
{false,[],[],[]};
-instance_of_constraints(S, [{simpletable,Type}]) ->
+instance_of_constraints(S, [{element_set,{simpletable,C},none}]) ->
+ {element_set,Type,none} = C,
+ instance_of_constraints_1(S, Type).
+
+instance_of_constraints_1(S, Type) ->
#type{def=#'Externaltypereference'{type=Name}} = Type,
ModuleName = S#state.mname,
ObjectSetRef=#'Externaltypereference'{module=ModuleName,
@@ -5014,93 +4238,100 @@ instance_of_constraints(S, [{simpletable,Type}]) ->
valueindex=[]},
{TableCInf,[{simpletable,Name}],CRel,[{objfun,ObjectSetRef}]}.
-%% Check ENUMERATED
-%% ****************************************
-%% Check that all values are unique
-%% assign values to un-numbered identifiers
-%% check that the constraints are allowed and correct
-%% put the updated info back into database
-check_enumerated(_S,[{Name,Number}|_Rest]= NNList,_Constr) when is_atom(Name), is_integer(Number)->
- %% already checked , just return the same list
- NNList;
-check_enumerated(_S,{[{Name,Number}|_Rest],L}= NNList,_Constr) when is_atom(Name), is_integer(Number), is_list(L)->
- %% already checked , contains extension marker, just return the same lists
- NNList;
-check_enumerated(S,NamedNumberList,_Constr) ->
- check_enum(S,NamedNumberList,[],[],[]).
-
-%% identifiers are put in Acc2
-%% returns either [{Name,Number}] or {[{Name,Number}],[{ExtName,ExtNumber}]}
-%% the latter is returned if the ENUMERATION contains EXTENSIONMARK
-check_enum(S,[{'NamedNumber',Id,Num}|T],Acc1,Acc2,Root) when is_integer(Num) ->
- check_enum(S,T,[{Id,Num}|Acc1],Acc2,Root);
-check_enum(S,['EXTENSIONMARK'|T],Acc1,Acc2,_Root) ->
- NewAcc2 = lists:keysort(2,Acc1),
- NewList = enum_number(lists:reverse(Acc2),NewAcc2,0,[],[]),
- { NewList, check_enum(S,T,[],[],enum_counts(NewList))};
-check_enum(S,[Id|T],Acc1,Acc2,Root) when is_atom(Id) ->
- check_enum(S,T,Acc1,[Id|Acc2],Root);
-check_enum(_S,[],Acc1,Acc2,Root) ->
- NewAcc2 = lists:keysort(2,Acc1),
- enum_number(lists:reverse(Acc2),NewAcc2,0,[],Root).
-
-
-% assign numbers to identifiers , numbers from 0 ... but must not
-% be the same as already assigned to NamedNumbers
-enum_number(Identifiers,NamedNumbers,Cnt,Acc,[]) ->
- enum_number(Identifiers,NamedNumbers,Cnt,Acc);
-enum_number(Identifiers,NamedNumbers,_Cnt,Acc,CountL) ->
- enum_extnumber(Identifiers,NamedNumbers,Acc,CountL).
-
-enum_number([H|T],[{Id,Num}|T2],Cnt,Acc) when Num > Cnt ->
- enum_number(T,[{Id,Num}|T2],Cnt+1,[{H,Cnt}|Acc]);
-enum_number([H|T],[{Id,Num}|T2],Cnt,Acc) when Num < Cnt -> % negative Num
- enum_number(T,T2,Cnt+1,[{H,Cnt},{Id,Num}|Acc]);
-enum_number([],L2,_Cnt,Acc) ->
- lists:append([lists:reverse(Acc),L2]);
-enum_number(L,[{Id,Num}|T2],Cnt,Acc) -> % Num == Cnt
- enum_number(L,T2,Cnt+1,[{Id,Num}|Acc]);
-enum_number([H|T],[],Cnt,Acc) ->
- enum_number(T,[],Cnt+1,[{H,Cnt}|Acc]).
-
-enum_extnumber(Identifiers,NamedNumbers,Acc,[C]) ->
- check_add_enum_numbers(NamedNumbers,[C]),
- enum_number(Identifiers,NamedNumbers,C,Acc);
-enum_extnumber([H|T],[{Id,Num}|T2],Acc,[C|Counts]) when Num > C ->
- enum_extnumber(T,[{Id,Num}|T2],[{H,C}|Acc],Counts);
-enum_extnumber([],L2,Acc,Cnt) ->
- check_add_enum_numbers(L2, Cnt),
- lists:concat([lists:reverse(Acc),L2]);
-enum_extnumber(_Identifiers,[{Id,Num}|_T2],_Acc,[C|_]) when Num < C ->
-%% enum_extnumber(Identifiers,T2,[{Id,Num}|Acc],Counts);
- exit({error,{asn1,"AdditionalEnumeration element with same number as root element",{Id,Num}}});
-enum_extnumber(Identifiers,[{Id,Num}|T2],Acc,[_C|Counts]) -> % Num =:= C
- enum_extnumber(Identifiers,T2,[{Id,Num}|Acc],Counts);
-enum_extnumber([H|T],[],Acc,[C|Counts]) ->
- enum_extnumber(T,[],[{H,C}|Acc],Counts).
-
-enum_counts([]) ->
- [0];
-enum_counts(L) ->
- Used=[I||{_,I}<-L],
- AddEnumLb = lists:max(Used) + 1,
- lists:foldl(fun(El,AccIn)->lists:delete(El,AccIn) end,
- lists:seq(0,AddEnumLb),
- Used).
-check_add_enum_numbers(L, Cnt) ->
- Max = lists:max(Cnt),
- Fun = fun({_,N}=El) when N < Max ->
- case lists:member(N,Cnt) of
- false ->
- exit({error,{asn1,"AdditionalEnumeration element with same number as root element",El}});
- _ ->
- ok
- end;
- (_) ->
- ok
- end,
- lists:foreach(Fun,L).
+%%%
+%%% Check ENUMERATED.
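+%%%
+%%% Unnumbered identifiers are assigned the lowest unused values and
+%%% the result is sorted by value. For example,
+%%% ENUMERATED {a, b(5), c} is checked to [{a,0},{c,1},{b,5}].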
+%%%
+check_enumerated(_S, [{Name,Number}|_]=NNL)
+ when is_atom(Name), is_integer(Number) ->
+ %% Already checked.
+ NNL;
+check_enumerated(_S, {[{Name,Number}|_],L}=NNL)
+ when is_atom(Name), is_integer(Number), is_list(L) ->
+ %% Already checked (with extension).
+ NNL;
+check_enumerated(S, NNL) ->
+ check_enum_ids(S, NNL, gb_sets:empty()),
+ check_enum(S, NNL, gb_sets:empty(), []).
+
+check_enum_ids(S, [{'NamedNumber',Id,_}|T], Ids0) ->
+ Ids = check_enum_update_ids(S, Id, Ids0),
+ check_enum_ids(S, T, Ids);
+check_enum_ids(S, ['EXTENSIONMARK'|T], Ids) ->
+ check_enum_ids(S, T, Ids);
+check_enum_ids(S, [Id|T], Ids0) when is_atom(Id) ->
+ Ids = check_enum_update_ids(S, Id, Ids0),
+ check_enum_ids(S, T, Ids);
+check_enum_ids(_, [], _) ->
+ ok.
+
+check_enum(S, [{'NamedNumber',Id,N}|T], Used0, Acc) ->
+ Used = check_enum_update_used(S, Id, N, Used0),
+ check_enum(S, T, Used, [{Id,N}|Acc]);
+check_enum(S, ['EXTENSIONMARK'|Ext0], Used0, Acc0) ->
+ Acc = lists:reverse(Acc0),
+ {Root,Used,Cnt} = check_enum_number_root(Acc, Used0, 0, []),
+ Ext = check_enum_ext(S, Ext0, Used, Cnt, []),
+ {Root,Ext};
+check_enum(S, [Id|T], Used, Acc) when is_atom(Id) ->
+ check_enum(S, T, Used, [Id|Acc]);
+check_enum(_, [], Used, Acc0) ->
+ Acc = lists:reverse(Acc0),
+ {Root,_,_} = check_enum_number_root(Acc, Used, 0, []),
+ lists:keysort(2, Root).
+
+check_enum_number_root([Id|T]=T0, Used0, Cnt, Acc) when is_atom(Id) ->
+ case gb_sets:is_element(Cnt, Used0) of
+ false ->
+ Used = gb_sets:insert(Cnt, Used0),
+ check_enum_number_root(T, Used, Cnt+1, [{Id,Cnt}|Acc]);
+ true ->
+ check_enum_number_root(T0, Used0, Cnt+1, Acc)
+ end;
+check_enum_number_root([H|T], Used, Cnt, Acc) ->
+ check_enum_number_root(T, Used, Cnt, [H|Acc]);
+check_enum_number_root([], Used, Cnt, Acc) ->
+ {lists:keysort(2, Acc),Used,Cnt}.
+
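+%% Extension additions must be numbered in ascending order and must
+%% not reuse root values. For example, ENUMERATED {a, ..., b} gives
+%% {[{a,0}],[{b,1}]}.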
+check_enum_ext(S, [{'NamedNumber',Id,N}|T], Used0, C, Acc) ->
+ Used = check_enum_update_used(S, Id, N, Used0),
+ if
+ N < C ->
+ asn1_error(S, {enum_not_ascending,Id,N,C-1});
+ true ->
+ ok
+ end,
+ check_enum_ext(S, T, Used, N+1, [{Id,N}|Acc]);
+check_enum_ext(S, [Id|T]=T0, Used0, C, Acc) when is_atom(Id) ->
+ case gb_sets:is_element(C, Used0) of
+ true ->
+ check_enum_ext(S, T0, Used0, C+1, Acc);
+ false ->
+ Used = gb_sets:insert(C, Used0),
+ check_enum_ext(S, T, Used, C+1, [{Id,C}|Acc])
+ end;
+check_enum_ext(_, [], _, _, Acc) ->
+ lists:keysort(2, Acc).
+
+check_enum_update_ids(S, Id, Ids) ->
+ case gb_sets:is_element(Id, Ids) of
+ false ->
+ gb_sets:insert(Id, Ids);
+ true ->
+ asn1_error(S, {enum_illegal_redefinition,Id})
+ end.
+
+check_enum_update_used(S, Id, N, Used) ->
+ case gb_sets:is_element(N, Used) of
+ false ->
+ gb_sets:insert(N, Used);
+ true ->
+ asn1_error(S, {enum_reused_value,Id,N})
+ end.
+
+%%%
+%%% End of ENUMERATED checking.
+%%%
check_boolean(_S,_Constr) ->
ok.
@@ -5145,7 +4376,7 @@ check_sequence(S,Type,Comps) ->
CompListTuple = complist_as_tuple(NewComps4),
{CRelInf,CompListTuple};
Dupl ->
- throw({error,{asn1,{duplicate_components,Dupl}}})
+ asn1_error(S, {duplicate_identifier, error_value(hd(Dupl))})
end.
complist_as_tuple(CompList) ->
@@ -5155,8 +4386,6 @@ complist_as_tuple([#'EXTENSIONMARK'{}|T], Acc, Ext, Acc2, root) ->
complist_as_tuple(T, Acc, Ext, Acc2, ext);
complist_as_tuple([#'EXTENSIONMARK'{}|T], Acc, Ext, Acc2, ext) ->
complist_as_tuple(T, Acc, Ext, Acc2, root2);
-complist_as_tuple([#'EXTENSIONMARK'{}|_T], _Acc, _Ext, _Acc2, root2) ->
- throw({error,{asn1,{too_many_extension_marks}}});
complist_as_tuple([C|T], Acc, Ext, Acc2, root) ->
complist_as_tuple(T, [C|Acc], Ext, Acc2, root);
complist_as_tuple([C|T], Acc, Ext, Acc2, ext) ->
@@ -5199,11 +4428,11 @@ expand_components2(S,{_,PT={pt,_,_}}) ->
expand_components2(S,{_,OCFT = #'ObjectClassFieldType'{}}) ->
UncheckedType = #type{def=OCFT},
Type = check_type(S,#typedef{typespec=UncheckedType},UncheckedType),
- expand_components2(S,{undefined,oCFT_def(S,Type)});
+ expand_components2(S, {undefined,ocft_def(Type)});
expand_components2(S,{_,ERef}) when is_record(ERef,'Externaltypereference') ->
expand_components2(S,get_referenced_type(S,ERef));
-expand_components2(_S,Err) ->
- throw({error,{asn1,{illegal_COMPONENTS_OF,Err}}}).
+expand_components2(S,{_, What}) ->
+ asn1_error(S, {illegal_COMPONENTS_OF, error_value(What)}).
take_only_rootset([])->
[];
@@ -5252,7 +4481,7 @@ check_sequenceof(S,Type,Component) when is_record(Component,type) ->
check_set(S,Type,Components) ->
{TableCInf,NewComponents} = check_sequence(S,Type,Components),
- check_distinct_tags(NewComponents,[]),
+ check_unique_tags(S, collect_components(NewComponents), []),
case {lists:member(der,S#state.options),S#state.erule} of
{true,_} ->
{Sorted,SortedComponents} = sort_components(der,S,NewComponents),
@@ -5264,35 +4493,21 @@ check_set(S,Type,Components) ->
{false,TableCInf,NewComponents}
end.
-
-%% check that all tags are distinct according to X.680 26.3
-check_distinct_tags({C1,C2,C3},Acc) when is_list(C1),is_list(C2),is_list(C3) ->
- check_distinct_tags(C1++C2++C3,Acc);
-check_distinct_tags({C1,C2},Acc) when is_list(C1),is_list(C2) ->
- check_distinct_tags(C1++C2,Acc);
-check_distinct_tags([#'ComponentType'{tags=[T]}|Cs],Acc) ->
- check_distinct(T,Acc),
- check_distinct_tags(Cs,[T|Acc]);
-check_distinct_tags([C=#'ComponentType'{tags=[T|Ts]}|Cs],Acc) ->
- check_distinct(T,Acc),
- check_distinct_tags([C#'ComponentType'{tags=Ts}|Cs],[T|Acc]);
-check_distinct_tags([#'ComponentType'{tags=[]}|_Cs],_Acc) ->
- throw({error,"Not distinct tags in SET"});
-check_distinct_tags([],_) ->
- ok.
-check_distinct(T,Acc) ->
- case lists:member(T,Acc) of
- true ->
- throw({error,"Not distinct tags in SET"});
- _ -> ok
- end.
+collect_components({C1,C2,C3}) ->
+ collect_components(C1++C2++C3);
+collect_components({C1,C2}) ->
+ collect_components(C1++C2);
+collect_components(Cs) ->
+ %% Assert that tags are not empty
+ [] = [EmptyTag || EmptyTag = #'ComponentType'{tags=[]} <- Cs],
+ Cs.
%% sorting in canonical order according to X.680 8.6, X.691 9.2
%% DER: all components shall be sorted in canonical order.
%% PER: only root components shall be sorted in canonical order. The
%% extension components shall remain in textual order.
%%
-sort_components(der,S=#state{tname=TypeName},Components) ->
+sort_components(der, S, Components) ->
{R1,Ext,R2} = extension(textual_order(Components)),
CompsList = case Ext of
noext -> R1;
@@ -5300,88 +4515,34 @@ sort_components(der,S=#state{tname=TypeName},Components) ->
end,
case {untagged_choice(S,CompsList),Ext} of
{false,noext} ->
- {true,sort_components1(S,TypeName,CompsList,[],[],[],[])};
+ {true,sort_components1(CompsList)};
{false,_} ->
- {true,{sort_components1(S,TypeName,CompsList,[],[],[],[]), []}};
+ {true,{sort_components1(CompsList),[]}};
{true,noext} ->
%% sort in run-time
{dynamic,R1};
_ ->
{dynamic,{R1, Ext, R2}}
end;
-sort_components(per,S=#state{tname=TypeName},Components) ->
+sort_components(per, S, Components) ->
{R1,Ext,R2} = extension(textual_order(Components)),
Root = tag_untagged_choice(S,R1++R2),
case Ext of
noext ->
- {true,sort_components1(S,TypeName,Root,[],[],[],[])};
+ {true,sort_components1(Root)};
_ ->
- {true,{sort_components1(S,TypeName,Root,[],[],[],[]),
- Ext}}
+ {true,{sort_components1(Root),Ext}}
end.
-sort_components1(S,TypeName,[C=#'ComponentType'{tags=[{'UNIVERSAL',_}|_R]}|Cs],
- UnivAcc,ApplAcc,ContAcc,PrivAcc) ->
- sort_components1(S,TypeName,Cs,[C|UnivAcc],ApplAcc,ContAcc,PrivAcc);
-sort_components1(S,TypeName,[C=#'ComponentType'{tags=[{'APPLICATION',_}|_R]}|Cs],
- UnivAcc,ApplAcc,ContAcc,PrivAcc) ->
- sort_components1(S,TypeName,Cs,UnivAcc,[C|ApplAcc],ContAcc,PrivAcc);
-sort_components1(S,TypeName,[C=#'ComponentType'{tags=[{'CONTEXT',_}|_R]}|Cs],
- UnivAcc,ApplAcc,ContAcc,PrivAcc) ->
- sort_components1(S,TypeName,Cs,UnivAcc,ApplAcc,[C|ContAcc],PrivAcc);
-sort_components1(S,TypeName,[C=#'ComponentType'{tags=[{'PRIVATE',_}|_R]}|Cs],
- UnivAcc,ApplAcc,ContAcc,PrivAcc) ->
- sort_components1(S,TypeName,Cs,UnivAcc,ApplAcc,ContAcc,[C|PrivAcc]);
-sort_components1(S,TypeName,[],UnivAcc,ApplAcc,ContAcc,PrivAcc) ->
- I = #'ComponentType'.tags,
- ascending_order_check(S,TypeName,sort_universal_type(UnivAcc)) ++
- ascending_order_check(S,TypeName,lists:keysort(I,ApplAcc)) ++
- ascending_order_check(S,TypeName,lists:keysort(I,ContAcc)) ++
- ascending_order_check(S,TypeName,lists:keysort(I,PrivAcc)).
-
-ascending_order_check(S,TypeName,Components) ->
- ascending_order_check1(S,TypeName,Components),
- Components.
-
-ascending_order_check1(S,TypeName,
- [C1 = #'ComponentType'{tags=[{_,T}|_]},
- C2 = #'ComponentType'{tags=[{_,T}|_]}|Rest]) ->
- asn1ct:warning("Indistinct tag ~p in SET ~p, components ~p and ~p~n",
- [T,TypeName,C1#'ComponentType'.name,C2#'ComponentType'.name],S,
- "Indistinct tag in SET"),
- ascending_order_check1(S,TypeName,[C2|Rest]);
-ascending_order_check1(S,TypeName,
- [C1 = #'ComponentType'{tags=[{'UNIVERSAL',T1}|_]},
- C2 = #'ComponentType'{tags=[{'UNIVERSAL',T2}|_]}|Rest]) ->
- case (decode_type(T1) == decode_type(T2)) of
- true ->
- asn1ct:warning("Indistinct tags ~p and ~p in"
- " SET ~p, components ~p and ~p~n",
- [T1,T2,TypeName,C1#'ComponentType'.name,
- C2#'ComponentType'.name],S,
- "Indistinct tags and in SET"),
- ascending_order_check1(S,TypeName,[C2|Rest]);
- _ ->
- ascending_order_check1(S,TypeName,[C2|Rest])
- end;
-ascending_order_check1(S,N,[_|Rest]) ->
- ascending_order_check1(S,N,Rest);
-ascending_order_check1(_,_,[]) ->
- ok.
-
-sort_universal_type(Components) ->
- List = lists:map(fun(C) ->
- #'ComponentType'{tags=[{_,T}|_]} = C,
- {decode_type(T),C}
- end,
- Components),
- SortedList = lists:keysort(1,List),
- lists:map(fun(X)->element(2,X) end,SortedList).
-
-decode_type(I) when is_integer(I) ->
- I;
-decode_type(T) ->
- asn1ct_gen_ber_bin_v2:decode_type(T).
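+%% Sort components in canonical tag order: UNIVERSAL before
+%% APPLICATION before CONTEXT-specific before PRIVATE, and within
+%% each class by ascending tag number.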
+sort_components1(Cs0) ->
+ Cs1 = [{tag_key(Tag),C} || #'ComponentType'{tags=[Tag|_]}=C <- Cs0],
+ Cs = lists:sort(Cs1),
+ [C || {_,C} <- Cs].
+
+tag_key({'UNIVERSAL',Tag}) -> {0,Tag};
+tag_key({'APPLICATION',Tag}) -> {1,Tag};
+tag_key({'CONTEXT',Tag}) -> {2,Tag};
+tag_key({'PRIVATE',Tag}) -> {3,Tag}.
untagged_choice(_S,[#'ComponentType'{typespec=#type{tag=[],def={'CHOICE',_}}}|_Rest]) ->
true;
@@ -5477,35 +4638,43 @@ check_selectiontype(S,Name,#type{def=Eref})
{RefMod,TypeDef} = get_referenced_type(S,Eref),
NewS = S#state{module=load_asn1_module(S,RefMod),
mname=RefMod,
- type=TypeDef,
tname=get_datastr_name(TypeDef)},
check_selectiontype2(NewS,Name,TypeDef);
check_selectiontype(S,Name,Type=#type{def={pt,_,_}}) ->
- TName =
- case S#state.recordtopname of
- [] ->
- S#state.tname;
- N -> N
- end,
+ TName = case S#state.recordtopname of
+ [] -> S#state.tname;
+ N -> N
+ end,
TDef = #typedef{name=TName,typespec=Type},
check_selectiontype2(S,Name,TDef);
-check_selectiontype(S,Name,Type) ->
- Msg = lists:flatten(io_lib:format("SelectionType error: ~w < ~w must be a reference to a CHOICE.",[Name,Type])),
- error({type,Msg,S}).
+check_selectiontype(S, _Name, Type) ->
+ asn1_error(S, {illegal_choice_type, error_value(Type)}).
check_selectiontype2(S,Name,TypeDef) ->
NewS = S#state{recordtopname=get_datastr_name(TypeDef)},
- CheckedType = check_type(NewS,TypeDef,TypeDef#typedef.typespec),
- Components = get_choice_components(S,CheckedType#type.def),
- case lists:keysearch(Name,#'ComponentType'.name,Components) of
- {value,C} ->
- %% The selected type will have the tag of the selected type.
- _T = C#'ComponentType'.typespec;
-% T#type{tag=def_to_tag(NewS,T#type.def)};
- _ ->
- Msg = lists:flatten(io_lib:format("error checking SelectionType: ~w~n",[Name])),
- error({type,Msg,S})
+ Components =
+ try
+ CheckedType = check_type(NewS,TypeDef,TypeDef#typedef.typespec),
+ get_choice_components(S,CheckedType#type.def)
+ catch error:_ ->
+ asn1_error(S, {illegal_choice_type, error_value(TypeDef)})
+ end,
+ case lists:keyfind(Name, #'ComponentType'.name, Components) of
+ #'ComponentType'{typespec=TS} -> TS;
+ false -> asn1_error(S, {illegal_id, error_value(Name)})
end.
+
+
+get_choice_components(_S,{'CHOICE',Components}) when is_list(Components)->
+ Components;
+get_choice_components(_S,{'CHOICE',{C1,C2}}) when is_list(C1),is_list(C2) ->
+ C1++C2;
+get_choice_components(S,ERef=#'Externaltypereference'{}) ->
+ {_RefMod,TypeDef}=get_referenced_type(S,ERef),
+ #typedef{typespec=TS} = TypeDef,
+ get_choice_components(S,TS#type.def).
+
+
check_restrictedstring(_S,_Def,_Constr) ->
ok.
@@ -5538,7 +4707,7 @@ check_choice(S,Type,Components) when is_list(Components) ->
check_unique_tags(S, NewComps3),
complist_as_tuple(NewComps3);
Dupl ->
- throw({error,{asn1,{duplicate_choice_alternatives,Dupl}}})
+ asn1_error(S, {duplicate_identifier,error_value(hd(Dupl))})
end;
check_choice(_S,_,[]) ->
[].
@@ -5635,25 +4804,30 @@ check_unique_tags(S,C) ->
case (S#state.module)#module.tagdefault of
'AUTOMATIC' ->
case any_manual_tag(C) of
- false -> true;
- _ -> collect_and_sort_tags(C,[])
+ false ->
+ true;
+ true ->
+ check_unique_tags(S, C, [])
end;
_ ->
- collect_and_sort_tags(C,[])
+ check_unique_tags(S, C, [])
end.
-collect_and_sort_tags([C|Rest],Acc) when is_record(C,'ComponentType') ->
- collect_and_sort_tags(Rest,C#'ComponentType'.tags ++ Acc);
-collect_and_sort_tags([_|Rest],Acc) ->
- collect_and_sort_tags(Rest,Acc);
-collect_and_sort_tags([],Acc) ->
- {Dupl,_}= lists:mapfoldl(fun(El,El)->{{dup,El},El};(El,_Prev)-> {El,El} end,notag,lists:sort(Acc)),
- Dupl2 = [Dup|| {dup,Dup} <- Dupl],
- if
- length(Dupl2) > 0 ->
- throw({error,{asn1,{duplicates_of_the_tags,Dupl2}}});
- true ->
- true
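+%% Collect {Tag,Name} pairs and use a sofs family to find any tag
+%% shared by more than one component; e.g. [{T,a},{T,b}] yields the
+%% duplicate group [a,b].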
+check_unique_tags(S, [#'ComponentType'{name=Name,tags=Tags0}|T], Acc) ->
+ Tags = [{Tag,Name} || Tag <- Tags0],
+ check_unique_tags(S, T, Tags ++ Acc);
+check_unique_tags(S, [_|T], Acc) ->
+ check_unique_tags(S, T, Acc);
+check_unique_tags(S, [], Acc) ->
+ R0 = sofs:relation(Acc),
+ R1 = sofs:relation_to_family(R0),
+ R2 = sofs:to_external(R1),
+ Dup = [Els || {_,[_,_|_]=Els} <- R2],
+ case Dup of
+ [] ->
+ ok;
+ [FirstDupl|_] ->
+ asn1_error(S, {duplicate_tags,FirstDupl})
end.
check_unique(L,Pos) ->
@@ -5795,28 +4969,18 @@ componentrelation_leadingattr(S,[C= #'ComponentType'{}|Cs],CompList,Acc,CompAcc)
{[],C};
[{ObjSet,Attr,N,ClassDef,_Path,ValueIndex}|_NewRest] ->
OS = object_set_mod_name(S,ObjSet),
- UniqueFieldName =
- case (catch get_unique_fieldname(S,#classdef{typespec=ClassDef})) of
- {error,'__undefined_',_} ->
- no_unique;
- {asn1,Msg,_} ->
- error({type,Msg,S});
- {'EXIT',Msg} ->
- error({type,{internal_error,Msg},S});
- {Other,_} -> Other
- end,
-% UsedFieldName = get_used_fieldname(S,Attr,STList),
+ UniqFN = get_unique_fieldname(S,
+ #classdef{typespec=ClassDef}),
%% Res should be done differently: even though
%% a unique field name exists it is not
%% certain that the ObjectClassFieldType of
%% the simple table constraint picks that
%% class field.
Res = #simpletableattributes{objectsetname=OS,
-%% c_name=asn1ct_gen:un_hyphen_var(Attr),
c_name=Attr,
c_index=N,
- usedclassfield=UniqueFieldName,
- uniqueclassfield=UniqueFieldName,
+ usedclassfield=UniqFN,
+ uniqueclassfield=UniqFN,
valueindex=ValueIndex},
{[Res],C#'ComponentType'{typespec=NewTSpec}}
end;
@@ -5869,7 +5033,7 @@ remove_doubles1(El,L) ->
NewL -> remove_doubles1(El,NewL)
end.
-%% get_simple_table_info searches the commponents Cs by the path from
+%% get_simple_table_info searches the components Cs by the path from
%% an at-list (third argument), and follows into a component of it if
%% necessary, to get information needed for code generating.
%%
@@ -5884,32 +5048,35 @@ remove_doubles1(El,L) ->
% %% at least one step below the outermost level, i.e. the leading
% %% information shall be on a sub level. 2) They don't have any common
% %% path.
-get_simple_table_info(S,Cs,[AtList|Rest]) ->
- [get_simple_table_info1(S,Cs,AtList,[])|get_simple_table_info(S,Cs,Rest)];
-get_simple_table_info(_,_,[]) ->
- [].
-get_simple_table_info1(S,Cs,[Cname|Cnames],Path) when is_list(Cs) ->
- case lists:keysearch(Cname,#'ComponentType'.name,Cs) of
- {value,C} ->
- get_simple_table_info1(S,C,Cnames,[Cname|Path]);
- _ ->
- error({type,"Missing expected simple table constraint",S})
- end;
-get_simple_table_info1(S,#'ComponentType'{typespec=TS},[],Path) ->
- %% In this component there must be a simple table constraint
- %% o.w. the asn1 code is wrong.
- #type{def=OCFT,constraint=Cnstr} = TS,
- case constraint_member(simpletable,Cnstr) of
- {true,{simpletable,_OSRef}} ->
- simple_table_info(S,OCFT,Path);
- _ ->
- error({type,{"missing expected simple table constraint",
- Cnstr},S})
+get_simple_table_info(S, Cs, AtLists) ->
+ [get_simple_table_info1(S, Cs, AtList, []) || AtList <- AtLists].
+
+get_simple_table_info1(S, Cs, [Cname|Cnames], Path) ->
+ #'ComponentType'{} = C =
+ lists:keyfind(Cname, #'ComponentType'.name, Cs),
+ get_simple_table_info2(S, C, Cnames, [Cname|Path]).
+
+get_simple_table_info2(S, #'ComponentType'{name=Name,typespec=TS}, [], Path) ->
+ OCFT = simple_table_get_ocft(S, Name, TS),
+ case lists:keymember(simpletable, 1, TS#type.constraint) of
+ true ->
+ simple_table_info(S, OCFT, Path);
+ false ->
+ asn1_error(S, {missing_table_constraint,Name})
end;
-get_simple_table_info1(S,#'ComponentType'{typespec=TS},Cnames,Path) ->
+get_simple_table_info2(S, #'ComponentType'{typespec=TS}, Cnames, Path) ->
Components = get_atlist_components(TS#type.def),
- get_simple_table_info1(S,Components,Cnames,Path).
-
+ get_simple_table_info1(S, Components, Cnames, Path).
+
+simple_table_get_ocft(_, _, #type{def=#'ObjectClassFieldType'{}=OCFT}) ->
+ OCFT;
+simple_table_get_ocft(S, Component, #type{constraint=Constr}) ->
+ case lists:keyfind(ocft, 1, Constr) of
+ {ocft,OCFT} ->
+ OCFT;
+ false ->
+ asn1_error(S, {missing_ocft,Component})
+ end.
simple_table_info(S,#'ObjectClassFieldType'{classname=ClRef,
class=ObjectClass,
@@ -5932,19 +5099,8 @@ simple_table_info(S,#'ObjectClassFieldType'{classname=ClRef,
CDef;
_ -> #classdef{typespec=ObjectClass}
end,
- UniqueName =
- case (catch get_unique_fieldname(S,ClassDef)) of
- {error,'__undefined_',_} -> no_unique;
- {asn1,Msg,_} ->
- error({type,Msg,S});
- {'EXIT',Msg} ->
- error({type,{internal_error,Msg},S});
- {Other,_} -> Other
- end,
- {lists:reverse(Path),ObjectClassFieldName,UniqueName};
-simple_table_info(S,Type,_) ->
- error({type,{"the type referenced by a componentrelation constraint must be a ObjectClassFieldType",Type},S}).
-
+ UniqueName = get_unique_fieldname(S, ClassDef),
+ {lists:reverse(Path),ObjectClassFieldName,UniqueName}.
%% any_component_relation searches for all component relation
%% constraints that refers to the actual level and returns a list of
@@ -5958,9 +5114,8 @@ simple_table_info(S,Type,_) ->
%% is found to check the validity of the at-list.
any_component_relation(S,[#'ComponentType'{name=CName,typespec=Type}|Cs],CNames,NamePath,Acc) ->
CRelPath =
- case constraint_member(componentrelation,Type#type.constraint) of
-%% [{componentrelation,_,AtNotation}] ->
- {true,{_,_,AtNotation}} ->
+ case lists:keyfind(componentrelation, 1, Type#type.constraint) of
+ {_,_,AtNotation} ->
%% Found component relation constraint, now check
%% whether this constraint is relevant for the level
%% where the search started
@@ -5969,7 +5124,7 @@ any_component_relation(S,[#'ComponentType'{name=CName,typespec=Type}|Cs],CNames,
%% simple table constraint from where the component
%% relation is found.
evaluate_atpath(S,NamePath,CNames,AtNot);
- _ ->
+ false ->
[]
end,
InnerAcc =
@@ -5991,11 +5146,11 @@ any_component_relation(S,[#'ComponentType'{name=CName,typespec=Type}|Cs],CNames,
any_component_relation(S,Cs,CNames,NamePath,InnerAcc++CRelPath++Acc);
any_component_relation(S,Type,CNames,NamePath,Acc) when is_record(Type,type) ->
CRelPath =
- case constraint_member(componentrelation,Type#type.constraint) of
- {true,{_,_,AtNotation}} ->
+ case lists:keyfind(componentrelation, 1, Type#type.constraint) of
+ {_,_,AtNotation} ->
AtNot = extract_at_notation(AtNotation),
evaluate_atpath(S,NamePath,CNames,AtNot);
- _ ->
+ false ->
[]
end,
InnerAcc =
@@ -6017,15 +5172,6 @@ any_component_relation(S,['ExtensionAdditionGroupEnd'|Cs],CNames,NamePath,Acc) -
any_component_relation(_,[],_,_,Acc) ->
Acc.
-constraint_member(componentrelation,[CRel={componentrelation,_,_}|_Rest]) ->
- {true,CRel};
-constraint_member(simpletable,[ST={simpletable,_}|_Rest]) ->
- {true,ST};
-constraint_member(Key,[_H|T]) ->
- constraint_member(Key,T);
-constraint_member(_,[]) ->
- false.
-
%% evaluate_atpath/4 finds out whether the at notation refers to the
%% search level. The list of referenced names in the AtNot list shall
%% begin with a name that exists on the level it refers to. If the
@@ -6059,9 +5205,7 @@ evaluate_atpath(S=#state{abscomppath=TopPath},NamePath,Cnames,{outermost,AtPath=
{_,[H|_T]} ->
case lists:member(H,Cnames) of
true -> [AtPathBelowTop];
- _ ->
- %% error({type,{asn1,"failed to analyze at-path",AtPath},S})
- throw({type,{asn1,"failed to analyze at-path",AtPath},S})
+ _ -> asn1_error(S, {invalid_at_path, AtPath})
end
end;
evaluate_atpath(_,_,_,_) ->
@@ -6098,23 +5242,8 @@ tuple2complist({R1,E,R2}) ->
tuple2complist(List) when is_list(List) ->
List.
-get_choice_components(_S,{'CHOICE',Components}) when is_list(Components)->
- Components;
-get_choice_components(_S,{'CHOICE',{C1,C2}}) when is_list(C1),is_list(C2) ->
- C1++C2;
-get_choice_components(S,ERef=#'Externaltypereference'{}) ->
- {_RefMod,TypeDef}=get_referenced_type(S,ERef),
- #typedef{typespec=TS} = TypeDef,
- get_choice_components(S,TS#type.def).
-
-extract_at_notation([{Level,[#'Externalvaluereference'{value=Name}|Rest]}]) ->
- {Level,[Name|extract_at_notation1(Rest)]};
-extract_at_notation(At) ->
- exit({error,{asn1,{at_notation,At}}}).
-extract_at_notation1([#'Externalvaluereference'{value=Name}|Rest]) ->
- [Name|extract_at_notation1(Rest)];
-extract_at_notation1([]) ->
- [].
+extract_at_notation([{Level,ValueRefs}]) ->
+ {Level,[Name || #'Externalvaluereference'{value=Name} <- ValueRefs]}.
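extract_at_notation/1 now does the extraction in a single list comprehension over the value-reference records. A small self-contained sketch of the record-pattern comprehension (the record below is a simplified stand-in for the one in asn1_records.hrl):

    -module(at_notation_demo).
    -export([names/1]).

    -record('Externalvaluereference', {pos, module, value}).

    %% A record pattern in the generator both filters and projects:
    %% list elements that do not match the pattern are skipped.
    names(ValueRefs) ->
        [Name || #'Externalvaluereference'{value=Name} <- ValueRefs].

    %% names([#'Externalvaluereference'{value=id}, something_else]) yields [id].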
%% componentrelation1/1 identifies all componentrelation constraints
%% that exist in C or in the substructure of C. Info about the found
@@ -6133,8 +5262,8 @@ componentrelation1(S,C = #type{def=Def,constraint=Constraint,tablecinf=TCI},
Ret =
% case Constraint of
% [{componentrelation,{_,_,ObjectSet},AtList}|_Rest] ->
- case constraint_member(componentrelation,Constraint) of
- {true,{_,{_,_,ObjectSet},AtList}} ->
+ case lists:keyfind(componentrelation, 1, Constraint) of
+ {_,{_,_,ObjectSet},AtList} ->
[{_,AL=[#'Externalvaluereference'{}|_R1]}|_R2] = AtList,
	    %% Note: if Path is longer than one, i.e. it is within
%% an inner type of the actual level, then the only
@@ -6145,7 +5274,7 @@ componentrelation1(S,C = #type{def=Def,constraint=Constraint,tablecinf=TCI},
lists:map(fun(#'Externalvaluereference'{value=V})->V end,
AL),
{[{ObjectSet,AtPath,ClassDef,Path}],Def};
- _ ->
+ false ->
%% check the inner type of component
innertype_comprel(S,Def,Path)
end,
@@ -6219,10 +5348,8 @@ componentlist_comprel(_,[],Acc,_,NewCL) ->
innertype_comprel1(S,T = #type{def=Def,constraint=Cons,tablecinf=TCI},Path) ->
Ret =
-% case Cons of
-% [{componentrelation,{_,_,ObjectSet},AtList}|_Rest] ->
- case constraint_member(componentrelation,Cons) of
- {true,{_,{_,_,ObjectSet},AtList}} ->
+ case lists:keyfind(componentrelation, 1, Cons) of
+ {_,{_,_,ObjectSet},AtList} ->
%% This AtList must have an "outermost" at sign to be
	    %% relevant here.
[{_,AL=[#'Externalvaluereference'{value=_Attr}|_R1]}|_R2]
@@ -6233,7 +5360,7 @@ innertype_comprel1(S,T = #type{def=Def,constraint=Cons,tablecinf=TCI},Path) ->
lists:map(fun(#'Externalvaluereference'{value=V})->V end,
AL),
[{ObjectSet,AtPath,ClassDef,Path}];
- _ ->
+ false ->
innertype_comprel(S,Def,Path)
end,
case Ret of
@@ -6301,8 +5428,7 @@ value_match(S,#'ComponentType'{typespec=Type},Name,[At|Ats],Acc) ->
InnerType = asn1ct_gen:get_inner(Type#type.def),
Components =
case get_atlist_components(Type#type.def) of
- [] -> error({type,{asn1,"element in at list must be a "
- "SEQUENCE, SET or CHOICE.",Name},S});
+ [] -> asn1_error(S, {invalid_element, Name});
Comps -> Comps
end,
{Index,ValueIndex} = component_value_index(S,InnerType,At,Components),
@@ -6322,29 +5448,27 @@ component_index1(_S,Name,[#'ComponentType'{name=Name}|_Cs],N) ->
component_index1(S,Name,[_C|Cs],N) ->
component_index1(S,Name,Cs,N+1);
component_index1(S,Name,[],_) ->
- error({type,{asn1,"component of at-list was not"
- " found in substructure",Name},S}).
+ asn1_error(S, {invalid_at_list, Name}).
-get_unique_fieldname(_S,ClassDef) when is_record(ClassDef,classdef) ->
-%% {_,Fields,_} = ClassDef#classdef.typespec,
- Fields = (ClassDef#classdef.typespec)#objectclass.fields,
- get_unique_fieldname1(Fields,[]);
+get_unique_fieldname(S, #classdef{typespec=TS}) ->
+ Fields = TS#objectclass.fields,
+ get_unique_fieldname1(S, Fields, []);
get_unique_fieldname(S,#typedef{typespec=#type{def=ClassRef}}) ->
%% A class definition may be referenced as
%% REFED-CLASS ::= DEFINED-CLASS and then REFED-CLASS is a typedef
{_M,ClassDef} = get_referenced_type(S,ClassRef),
get_unique_fieldname(S,ClassDef).
-get_unique_fieldname1([],[]) ->
- throw({error,'__undefined_',[]});
-get_unique_fieldname1([],[Name]) ->
- Name;
-get_unique_fieldname1([],Acc) ->
- throw({asn1,'only one UNIQUE field is allowed in CLASS',Acc});
-get_unique_fieldname1([{fixedtypevaluefield,Name,_,'UNIQUE',Opt}|Rest],Acc) ->
- get_unique_fieldname1(Rest,[{Name,Opt}|Acc]);
-get_unique_fieldname1([_H|T],Acc) ->
- get_unique_fieldname1(T,Acc).
+get_unique_fieldname1(S, [{fixedtypevaluefield,Name,_,'UNIQUE',Opt}|T], Acc) ->
+ get_unique_fieldname1(S, T, [{Name,Opt}|Acc]);
+get_unique_fieldname1(S, [_|T], Acc) ->
+ get_unique_fieldname1(S, T, Acc);
+get_unique_fieldname1(S, [], Acc) ->
+ case Acc of
+ [] -> no_unique;
+ [Name] -> Name;
+ [_|_] -> asn1_error(S, multiple_uniqs)
+ end.
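get_unique_fieldname1/3 first accumulates every UNIQUE field and only then classifies the result: none, exactly one, or an error for several. The same accumulate-then-classify idea in a compressed, standalone form (the tuple shapes are illustrative, not the real field records):

    -module(unique_demo).
    -export([classify/1]).

    classify(Fields) ->
        case [Name || {unique,Name} <- Fields] of
            [] -> no_unique;
            [Name] -> {ok,Name};
            [_|_] -> {error,multiple_uniqs}
        end.

    %% 1> unique_demo:classify([{other,a},{unique,id}]).
    %% {ok,id}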
get_tableconstraint_info(S,Type,{CheckedTs,EComps,CheckedTs2}) ->
{get_tableconstraint_info(S,Type,CheckedTs,[]),
@@ -6400,31 +5524,8 @@ get_tableconstraint_info(S,Type,[C|Cs],Acc) ->
get_referenced_fieldname([{_,FirstFieldname}]) ->
{FirstFieldname,[]};
-get_referenced_fieldname([{_,FirstFieldname}|Rest]) ->
- {FirstFieldname,lists:map(fun(X)->element(2,X) end,Rest)};
-get_referenced_fieldname(Def={FieldName,RestFieldName}) when is_atom(FieldName),is_list(RestFieldName)->
- Def;
-get_referenced_fieldname(Def) ->
- {no_type,Def}.
-
-%% get_ObjectClassFieldType extracts the type from the chain of
-%% objects that leads to a final type.
-get_ObjectClassFieldType(S,ERef,PrimFieldNameList) when
- is_record(ERef,'Externaltypereference') ->
- {MName,Type} = get_referenced_type(S,ERef),
- NewS = update_state(S#state{type=Type,
- tname=ERef#'Externaltypereference'.type},MName),
- ClassSpec = check_class(NewS,Type),
- Fields = ClassSpec#objectclass.fields,
- get_ObjectClassFieldType(S,Fields,PrimFieldNameList);
-get_ObjectClassFieldType(S,Fields,L=[_PrimFieldName1|_Rest]) ->
- check_PrimitiveFieldNames(S,Fields,L),
- get_OCFType(S,Fields,L);
-get_ObjectClassFieldType(S,ERef,{FieldName,Rest}) ->
- get_ObjectClassFieldType(S,ERef,Rest ++ [FieldName]).
-
-check_PrimitiveFieldNames(_S,_Fields,_) ->
- ok.
+get_referenced_fieldname([{_,FirstFieldname}|T]) ->
+ {FirstFieldname,[element(2, X) || X <- T]}.
%% get_ObjectClassFieldType_classdef gets the def of the class of the
%% ObjectClassFieldType, i.e. the objectclass record. If the type has
@@ -6445,15 +5546,13 @@ get_OCFType(S,Fields,[PrimFieldName|Rest]) ->
{fixedtypevaluefield,PrimFieldName,Type};
{value,{objectfield,_,ClassRef,_Unique,_OptSpec}} ->
{MName,ClassDef} = get_referenced_type(S,ClassRef),
- NewS = update_state(S#state{type=ClassDef,
- tname=get_datastr_name(ClassDef)},
+ NewS = update_state(S#state{tname=get_datastr_name(ClassDef)},
MName),
CheckedCDef = check_class(NewS,ClassDef),
get_OCFType(S,CheckedCDef#objectclass.fields,Rest);
{value,{objectsetfield,_,Type,_OptSpec}} ->
{MName,ClassDef} = get_referenced_type(S,Type#type.def),
- NewS = update_state(S#state{type=ClassDef,
- tname=get_datastr_name(ClassDef)},
+ NewS = update_state(S#state{tname=get_datastr_name(ClassDef)},
MName),
CheckedCDef = check_class(NewS,ClassDef),
get_OCFType(S,CheckedCDef#objectclass.fields,Rest);
@@ -6461,7 +5560,7 @@ get_OCFType(S,Fields,[PrimFieldName|Rest]) ->
{value,Other} ->
{element(1,Other),PrimFieldName};
_ ->
- throw({error,lists:flatten(io_lib:format("undefined FieldName in ObjectClassFieldType: ~w",[PrimFieldName]))})
+ asn1_error(S, {illegal_object_field, PrimFieldName})
end.
get_taglist(S,Ext) when is_record(Ext,'Externaltypereference') ->
@@ -6485,30 +5584,8 @@ get_taglist(_S,#'ObjectClassFieldType'{type={typefield,_}}) ->
[];
get_taglist(S,#'ObjectClassFieldType'{type={fixedtypevaluefield,_,Type}}) ->
get_taglist(S,Type);
-get_taglist(S,{ERef=#'Externaltypereference'{},FieldNameList})
- when is_list(FieldNameList) ->
- case get_ObjectClassFieldType(S,ERef,FieldNameList) of
- {fixedtypevaluefield,_,Type} -> get_taglist(S,Type);
- {TypeFieldName,_} when is_atom(TypeFieldName) -> []%should check if allowed
- end;
-get_taglist(S,{ObjCl,FieldNameList}) when is_record(ObjCl,objectclass),
- is_list(FieldNameList) ->
- case get_ObjectClassFieldType(S,ObjCl#objectclass.fields,FieldNameList) of
- {fixedtypevaluefield,_,Type} -> get_taglist(S,Type);
- {TypeFieldName,_} when is_atom(TypeFieldName) -> []%should check if allowed
- end;
-get_taglist(S,Def) ->
- case S#state.erule of
- ber ->
- [];
- _ ->
- case Def of
- 'ASN1_OPEN_TYPE' -> % open_type has no UNIVERSAL tag as such
- [];
- _ ->
- [asn1ct_gen:def_to_tag(Def)]
- end
- end.
+get_taglist(_, _) ->
+ [].
get_taglist1(S,[#'ComponentType'{name=_Cname,tags=TagL}|Rest]) when is_list(TagL) ->
    %% tag_list has been here, just return TagL and continue with next alternative
@@ -6565,15 +5642,6 @@ get_taglist1(_S,[]) ->
%% tag_number('CHARACTER STRING') -> 29;
%% tag_number('BMPString') -> 30.
-
-dbget_ex(_S,Module,Key) ->
- case asn1_db:dbget(Module,Key) of
- undefined ->
-
- throw({error,{asn1,{undefined,{Module,Key}}}}); % this is catched on toplevel type or value
- T -> T
- end.
-
merge_tags(T1, T2) when is_list(T2) ->
merge_tags2(T1 ++ T2, []);
merge_tags(T1, T2) ->
@@ -6590,75 +5658,46 @@ merge_tags2([H|T],Acc) ->
merge_tags2([], Acc) ->
lists:reverse(Acc).
-%% merge_constraints(C1, []) ->
-%% C1;
-%% merge_constraints([], C2) ->
-%% C2;
-%% merge_constraints(C1, C2) ->
-%% {SList,VList,PAList,Rest} = splitlist(C1++C2,[],[],[],[]),
-%% SizeC = merge_constraints(SList),
-%% ValueC = merge_constraints(VList),
-%% PermAlphaC = merge_constraints(PAList),
-%% case Rest of
-%% [] ->
-%% SizeC ++ ValueC ++ PermAlphaC;
-%% _ ->
-%% throw({error,{asn1,{not_implemented,{merge_constraints,Rest}}}})
-%% end.
-
-%% merge_constraints([]) -> [];
-%% merge_constraints([C1 = {_,{Low1,High1}},{_,{Low2,High2}}|Rest]) when Low1 >= Low2,
-%% High1 =< High2 ->
-%% merge_constraints([C1|Rest]);
-%% merge_constraints([C1={'PermittedAlphabet',_},C2|Rest]) ->
-%% [C1|merge_constraints([C2|Rest])];
-%% merge_constraints([C1 = {_,{_Low1,_High1}},C2 = {_,{_Low2,_High2}}|_Rest]) ->
-%% throw({error,asn1,{conflicting_constraints,{C1,C2}}});
-%% merge_constraints([C]) ->
-%% [C].
-
-%% splitlist([C={'SizeConstraint',_}|Rest],Sacc,Vacc,PAacc,Restacc) ->
-%% splitlist(Rest,[C|Sacc],Vacc,PAacc,Restacc);
-%% splitlist([C={'ValueRange',_}|Rest],Sacc,Vacc,PAacc,Restacc) ->
-%% splitlist(Rest,Sacc,[C|Vacc],PAacc,Restacc);
-%% splitlist([C={'PermittedAlphabet',_}|Rest],Sacc,Vacc,PAacc,Restacc) ->
-%% splitlist(Rest,Sacc,Vacc,[C|PAacc],Restacc);
-%% splitlist([C|Rest],Sacc,Vacc,PAacc,Restacc) ->
-%% splitlist(Rest,Sacc,Vacc,PAacc,[C|Restacc]);
-%% splitlist([],Sacc,Vacc,PAacc,Restacc) ->
-%% {lists:reverse(Sacc),
-%% lists:reverse(Vacc),
-%% lists:reverse(PAacc),
-%% lists:reverse(Restacc)}.
-
-
-
-storeindb(S,M) when is_record(M,module) ->
- TVlist = M#module.typeorval,
- NewM = M#module{typeorval=findtypes_and_values(TVlist)},
- asn1_db:dbnew(NewM#module.name, S#state.erule),
- asn1_db:dbput(NewM#module.name,'MODULE', NewM),
- Res = storeindb(#state{mname=NewM#module.name}, TVlist, []),
- include_default_class(S,NewM#module.name),
+storeindb(S0, #module{name=ModName,typeorval=TVlist0}=M) ->
+ S = S0#state{mname=ModName},
+ TVlist1 = [{asn1ct:get_name_of_def(Def),Def} || Def <- TVlist0],
+ case check_duplicate_defs(S, TVlist1) of
+ ok ->
+ storeindb_1(S, M, TVlist0, TVlist1);
+ {error,_}=Error ->
+ Error
+ end.
+
+storeindb_1(S, #module{name=ModName}=M, TVlist0, TVlist) ->
+ NewM = M#module{typeorval=findtypes_and_values(TVlist0)},
+ asn1_db:dbnew(ModName, S#state.erule),
+ asn1_db:dbput(ModName, 'MODULE', NewM),
+ asn1_db:dbput(ModName, TVlist),
+ include_default_class(S, NewM#module.name),
include_default_type(NewM#module.name),
- Res.
+ ok.
-storeindb(#state{mname=Module}=S, [H|T], Errors) ->
- Name = asn1ct:get_name_of_def(H),
- case asn1_db:dbget(Module, Name) of
- undefined ->
- asn1_db:dbput(Module, Name, H),
- storeindb(S, T, Errors);
- Prev ->
- PrevLine = asn1ct:get_pos_of_def(Prev),
- Error = return_asn1_error(S, H, {already_defined,Name,PrevLine}),
- storeindb(S, T, [Error|Errors])
- end;
-storeindb(_, [], []) ->
- ok;
-storeindb(_, [], [_|_]=Errors) ->
- {error,Errors}.
+check_duplicate_defs(S, Defs) ->
+ Set0 = sofs:relation(Defs),
+ Set1 = sofs:relation_to_family(Set0),
+ Set = sofs:to_external(Set1),
+ case [duplicate_def(S, N, Dup) || {N,[_,_|_]=Dup} <- Set] of
+ [] ->
+ ok;
+ [_|_]=E ->
+ {error,lists:append(E)}
+ end.
+
+duplicate_def(S, Name, Dups0) ->
+ Dups1 = [{asn1ct:get_pos_of_def(Def),Def} || Def <- Dups0],
+ [{Prev,_}|Dups] = lists:sort(Dups1),
+ duplicate_def_1(S, Dups, Name, Prev).
+duplicate_def_1(S, [{_,Def}|T], Name, Prev) ->
+ E = return_asn1_error(S, Def, {already_defined,Name,Prev}),
+ [E|duplicate_def_1(S, T, Name, Prev)];
+duplicate_def_1(_, [], _, _) ->
+ [].
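check_duplicate_defs/2 leans on the sofs module: turning the {Name,Def} pairs into a relation and then into a family groups all definitions per name, so duplicates are simply families with more than one member. A minimal sketch of that grouping idea (module name is illustrative):

    -module(dup_demo).
    -export([duplicates/1]).

    %% Returns [{Key,Values}] for every key that occurs more than once.
    duplicates(Pairs) ->
        Fam = sofs:to_external(sofs:relation_to_family(sofs:relation(Pairs))),
        [{Key,Vals} || {Key,[_,_|_]=Vals} <- Fam].

    %% 1> dup_demo:duplicates([{a,1},{b,2},{a,3}]).
    %% [{a,[1,3]}]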
findtypes_and_values(TVList) ->
findtypes_and_values(TVList,[],[],[],[],[],[]).%% Types,Values,
@@ -6698,99 +5737,146 @@ findtypes_and_values([],Tacc,Vacc,Pacc,Cacc,Oacc,OSacc) ->
{lists:reverse(Tacc),lists:reverse(Vacc),lists:reverse(Pacc),
lists:reverse(Cacc),lists:reverse(Oacc),lists:reverse(OSacc)}.
+return_asn1_error(#state{error_context=Context}=S, Error) ->
+ return_asn1_error(S, Context, Error).
+
return_asn1_error(#state{mname=Where}, Item, Error) ->
Pos = asn1ct:get_pos_of_def(Item),
{structured_error,{Where,Pos},?MODULE,Error}.
-asn1_error(S, Item, Error) ->
- throw({error,return_asn1_error(S, Item, Error)}).
+asn1_error(S, Error) ->
+ throw({error,return_asn1_error(S, Error)}).
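asn1_error/2 throws an {error,StructuredError} tuple built from the error context stored in #state{}; callers such as check_fold/3 catch it and keep going, so a single run can report many errors. A minimal sketch of that throw-and-collect pattern, independent of the asn1 records (names are illustrative):

    -module(structured_error_demo).
    -export([check_all/1]).

    check_all(Values) ->
        %% Collect one error per failing element instead of stopping at the
        %% first failure.
        lists:foldr(fun(V, Acc) ->
                            try do_check(V) of
                                ok -> Acc
                            catch
                                throw:{error,Reason} -> [Reason|Acc]
                            end
                    end, [], Values).

    do_check(V) when is_integer(V) -> ok;
    do_check(V) -> throw({error,{structured_error,V,not_an_integer}}).

    %% 1> structured_error_demo:check_all([1,bad,2]).
    %% [{structured_error,bad,not_an_integer}]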
format_error({already_defined,Name,PrevLine}) ->
io_lib:format("the name ~p has already been defined at line ~p",
[Name,PrevLine]);
+format_error({duplicate_identifier,Ids}) ->
+ io_lib:format("the identifier '~p' has already been used", [Ids]);
+format_error({duplicate_tags,Elements}) ->
+ io_lib:format("duplicate tags in the elements: ~s",
+ [format_elements(Elements)]);
+format_error({enum_illegal_redefinition,Id}) ->
+ io_lib:format("'~s' must not be redefined", [Id]);
+format_error({enum_not_ascending,Id,N,Prev}) ->
+ io_lib:format("the values for enumerations which follow '...' must "
+ "be in ascending order, but '~p(~p)' is less than the "
+ "previous value '~p'", [Id,N,Prev]);
+format_error({enum_reused_value,Id,Val}) ->
+ io_lib:format("'~s' has the value '~p' which is used more than once",
+ [Id,Val]);
+format_error({illegal_id, Id}) ->
+ io_lib:format("illegal identifier: ~p", [Id]);
+format_error({illegal_choice_type, Ref}) ->
+ io_lib:format("expecting a CHOICE type: ~p", [Ref]);
+format_error({illegal_class_name,Class}) ->
+ io_lib:format("the class name '~s' is illegal (it must start with an uppercase letter and only contain uppercase letters, digits, or hyphens)", [Class]);
+format_error({illegal_COMPONENTS_OF, Ref}) ->
+ io_lib:format("expected a SEQUENCE or SET got: ~p", [Ref]);
+format_error(illegal_external_value) ->
+ "illegal value in EXTERNAL type";
format_error({illegal_instance_of,Class}) ->
io_lib:format("using INSTANCE OF on class '~s' is illegal, "
- "because INSTANCE OF may only be used on the class TYPE-IDENTFIER",
+ "because INSTANCE OF may only be used on the class TYPE-IDENTIFIER",
[Class]);
+format_error(illegal_integer_value) ->
+ "expecting an integer value";
+format_error(illegal_object) ->
+ "expecting an object";
+format_error({illegal_object_field, Id}) ->
+ io_lib:format("expecting a class field: ~p",[Id]);
+format_error({illegal_oid,o_id}) ->
+ "illegal OBJECT IDENTIFIER";
+format_error({illegal_oid,rel_oid}) ->
+ "illegal RELATIVE-OID";
format_error(illegal_octet_string_value) ->
"expecting a bstring or an hstring as value for an OCTET STRING";
format_error({illegal_typereference,Name}) ->
io_lib:format("'~p' is used as a typereference, but does not start with an uppercase letter", [Name]);
+format_error(illegal_table_constraint) ->
+ "table constraints may only be applied to CLASS.&field constructs";
+format_error(illegal_value) ->
+ "expecting a value";
+format_error({illegal_value, TYPE}) ->
+ io_lib:format("expecting a ~s value", [TYPE]);
format_error({invalid_fields,Fields,Obj}) ->
io_lib:format("invalid ~s in ~p", [format_fields(Fields),Obj]);
format_error({invalid_bit_number,Bit}) ->
io_lib:format("the bit number '~p' is invalid", [Bit]);
+format_error(invalid_table_constraint) ->
+ "the table constraint is not an object set";
+format_error(invalid_objectset) ->
+ "expecting an object set";
+format_error({implicit_tag_before,Kind}) ->
+ "illegal implicit tag before " ++
+ case Kind of
+ choice -> "'CHOICE'";
+ open_type -> "open type"
+ end;
format_error({missing_mandatory_fields,Fields,Obj}) ->
io_lib:format("missing mandatory ~s in ~p",
[format_fields(Fields),Obj]);
+format_error({missing_table_constraint,Component}) ->
+ io_lib:format("the component '~s' is referenced by a component relation constraint using the '@field-name' notation, but does not have a table constraint",
+ [Component]);
+format_error({missing_id,Id}) ->
+ io_lib:format("expected the mandatory component '~p'", [Id]);
+format_error({missing_ocft,Component}) ->
+ io_lib:format("the component '~s' must be an ObjectClassFieldType (CLASSNAME.&field-name)", [Component]);
+format_error(multiple_uniqs) ->
+ "implementation limitation: only one UNIQUE field is allowed in CLASS";
format_error({namelist_redefinition,Name}) ->
io_lib:format("the name '~s' can not be redefined", [Name]);
+format_error({param_bad_type, Ref}) ->
+ io_lib:format("'~p' is not a parameterized type", [Ref]);
+format_error(param_wrong_number_of_arguments) ->
+ "wrong number of arguments";
+format_error(reversed_range) ->
+ "ranges must be given in increasing order";
+format_error({syntax_duplicated_fields,Fields}) ->
+ io_lib:format("~s must only occur once in the syntax list",
+ [format_fields(Fields)]);
+format_error(syntax_nomatch) ->
+ "unexpected end of object definition";
+format_error({syntax_mandatory_in_optional_group,Name}) ->
+ io_lib:format("the field '&~s' must not be within an optional group since it is not optional",
+ [Name]);
+format_error({syntax_missing_mandatory_fields,Fields}) ->
+ io_lib:format("missing mandatory ~s in the syntax list",
+ [format_fields(Fields)]);
+format_error({syntax_nomatch,Actual}) ->
+ io_lib:format("~s is not the next item allowed according to the defined syntax",
+ [Actual]);
+format_error({syntax_undefined_field,Field}) ->
+ io_lib:format("'&~s' is not a field of the class being defined",
+ [Field]);
format_error({undefined,Name}) ->
io_lib:format("'~s' is referenced, but is not defined", [Name]);
+format_error({undefined_export,Ref}) ->
+ io_lib:format("'~s' is exported but is not defined", [Ref]);
+format_error({undefined_field,FieldName}) ->
+ io_lib:format("the field '&~s' is undefined", [FieldName]);
format_error({undefined_import,Ref,Module}) ->
io_lib:format("'~s' is not exported from ~s", [Ref,Module]);
+format_error({unique_and_default,Field}) ->
+ io_lib:format("the field '&~s' must not have both 'UNIQUE' and 'DEFAULT'",
+ [Field]);
format_error({value_reused,Val}) ->
io_lib:format("the value '~p' is used more than once", [Val]);
+format_error({non_unique_object,Id}) ->
+ io_lib:format("object set with a UNIQUE field value of '~p' is used more than once", [Id]);
format_error(Other) ->
io_lib:format("~p", [Other]).
format_fields([F]) ->
- io_lib:format("field &~s", [F]);
+ io_lib:format("field '&~s'", [F]);
format_fields([H|T]) ->
- [io_lib:format("fields &~s", [H])|
- [io_lib:format(", &~s", [F]) || F <- T]].
-
-error({_,{structured_error,_,_,_}=SE,_}) ->
- SE;
-error({export,Msg,#state{mname=Mname,type=Ref,tname=Typename}}) ->
- Pos = Ref#'Externaltypereference'.pos,
- io:format("asn1error:~p:~p:~p~n~p~n",[Pos,Mname,Typename,Msg]),
- {error,{export,Pos,Mname,Typename,Msg}};
-% error({type,{Msg1,Msg2},#state{mname=Mname,type=Type,tname=Typename}})
-% when is_record(Type,typedef) ->
-% io:format("asn1error:~p:~p:~p ~p~n",
-% [Type#typedef.pos,Mname,Typename,Msg1]),
-% {error,{type,Type#typedef.pos,Mname,Typename,Msg1,Msg2}};
-error({type,Msg,#state{mname=Mname,type=Type,tname=Typename}})
- when is_record(Type,type) ->
- io:format("asn1error:~p:~p~n~p~n",
- [Mname,Typename,Msg]),
- {error,{type,Mname,Typename,Msg}};
-error({type,Msg,#state{mname=Mname,type=Type,tname=Typename}})
- when is_record(Type,typedef) ->
- io:format("asn1error:~p:~p:~p~n~p~n",
- [Type#typedef.pos,Mname,Typename,Msg]),
- {error,{type,Type#typedef.pos,Mname,Typename,Msg}};
-error({type,Msg,#state{mname=Mname,type=Type,tname=Typename}})
- when is_record(Type,ptypedef) ->
- io:format("asn1error:~p:~p:~p~n~p~n",
- [Type#ptypedef.pos,Mname,Typename,Msg]),
- {error,{type,Type#ptypedef.pos,Mname,Typename,Msg}};
-error({type,Msg,#state{mname=Mname,value=Value,vname=Valuename}})
- when is_record(Value,valuedef) ->
- io:format("asn1error:~p:~p:~p~n~p~n",[Value#valuedef.pos,Mname,Valuename,Msg]),
- {error,{type,Value#valuedef.pos,Mname,Valuename,Msg}};
-error({type,Msg,#state{mname=Mname,type=Type,tname=Typename}})
- when is_record(Type,pobjectdef) ->
- io:format("asn1error:~p:~p:~p~n~p~n",
- [Type#pobjectdef.pos,Mname,Typename,Msg]),
- {error,{type,Type#pobjectdef.pos,Mname,Typename,Msg}};
-error({value,Msg,#state{mname=Mname,value=Value,vname=Valuename}})
- when is_record(Value,valuedef) ->
- io:format("asn1error:~p:~p:~p~n~p~n",[Value#valuedef.pos,Mname,Valuename,Msg]),
- {error,{value,Value#valuedef.pos,Mname,Valuename,Msg}};
-error({Other,Msg,#state{mname=Mname,value=#valuedef{pos=Pos},vname=Valuename}}) ->
- io:format("asn1error:~p:~p:~p~n~p~n",[Pos,Mname,Valuename,Msg]),
- {error,{Other,Pos,Mname,Valuename,Msg}};
-error({Other,Msg,#state{mname=Mname,type=#typedef{pos=Pos},tname=Typename}}) ->
- io:format("asn1error:~p:~p:~p~n~p~n",[Pos,Mname,Typename,Msg]),
- {error,{Other,Pos,Mname,Typename,Msg}};
-error({Other,Msg,#state{mname=Mname,type=#classdef{pos=Pos},tname=Typename}}) ->
- io:format("asn1error:~p:~p:~p~n~p~n",[Pos,Mname,Typename,Msg]),
- {error,{Other,Pos,Mname,Typename,Msg}};
-error({Other,Msg,#state{mname=Mname,type=Type,tname=Typename}}) ->
- io:format("asn1error:~p:~p:~p~n~p~n",[asn1ct:get_pos_of_def(Type),Mname,Typename,Msg]),
- {error,{Other,asn1ct:get_pos_of_def(Type),Mname,Typename,Msg}}.
+ [io_lib:format("fields '&~s'", [H])|
+ [io_lib:format(", '&~s'", [F]) || F <- T]].
+
+format_elements([H1,H2|T]) ->
+ [io_lib:format("~p, ", [H1])|format_elements([H2|T])];
+format_elements([H]) ->
+ io_lib:format("~p", [H]).
include_default_type(Module) ->
NameAbsList = default_type_list(),
@@ -6953,62 +6039,62 @@ default_type_list() ->
].
-include_default_class(S,Module) ->
- NameAbsList = default_class_list(S),
- include_default_class1(Module,NameAbsList).
+include_default_class(S, Module) ->
+ _ = [include_default_class1(S, Module, ClassDef) ||
+ ClassDef <- default_class_list()],
+ ok.
-include_default_class1(_,[]) ->
- ok;
-include_default_class1(Module,[{Name,TS}|Rest]) ->
- case asn1_db:dbget(Module,Name) of
+include_default_class1(S, Module, {Name,Ts0}) ->
+ case asn1_db:dbget(Module, Name) of
undefined ->
- C = #classdef{checked=true,module=Module,name=Name,
- typespec=TS},
- asn1_db:dbput(Module,Name,C);
- _ -> ok
- end,
- include_default_class1(Module,Rest).
+ #objectclass{fields=Fields,
+ syntax={'WITH SYNTAX',Syntax0}} = Ts0,
+ Syntax = preprocess_syntax(S, Syntax0, Fields),
+ Ts = Ts0#objectclass{syntax={preprocessed_syntax,Syntax}},
+ C = #classdef{checked=true,module=Module,
+ name=Name,typespec=Ts},
+ asn1_db:dbput(Module, Name, C);
+ _ ->
+ ok
+ end.
-default_class_list(S) ->
+default_class_list() ->
[{'TYPE-IDENTIFIER',
- {objectclass,
- [{fixedtypevaluefield,
- id,
- #type{tag=?TAG_PRIMITIVE(?N_OBJECT_IDENTIFIER),
- def='OBJECT IDENTIFIER'},
- 'UNIQUE',
- 'MANDATORY'},
- {typefield,'Type','MANDATORY'}],
- {'WITH SYNTAX',
- [{typefieldreference,'Type'},
- 'IDENTIFIED',
- 'BY',
- {valuefieldreference,id}]}}},
+ #objectclass{fields=[{fixedtypevaluefield,
+ id,
+ #type{tag=[?TAG_PRIMITIVE(?N_OBJECT_IDENTIFIER)],
+ def='OBJECT IDENTIFIER'},
+ 'UNIQUE',
+ 'MANDATORY'},
+ {typefield,'Type','MANDATORY'}],
+ syntax={'WITH SYNTAX',
+ [{typefieldreference,'Type'},
+ 'IDENTIFIED',
+ 'BY',
+ {valuefieldreference,id}]}}},
{'ABSTRACT-SYNTAX',
- {objectclass,
- [{fixedtypevaluefield,
- id,
- #type{tag=?TAG_PRIMITIVE(?N_OBJECT_IDENTIFIER),
- def='OBJECT IDENTIFIER'},
- 'UNIQUE',
- 'MANDATORY'},
- {typefield,'Type','MANDATORY'},
- {fixedtypevaluefield,
- property,
- #type{tag=?TAG_PRIMITIVE(?N_BIT_STRING),
- def={'BIT STRING',[]}},
- undefined,
- {'DEFAULT',
- [0,1,0]}}],
- {'WITH SYNTAX',
- [{typefieldreference,'Type'},
- 'IDENTIFIED',
- 'BY',
- {valuefieldreference,id},
- ['HAS',
- 'PROPERTY',
- {valuefieldreference,property}]]}}}].
-
+ #objectclass{fields=[{fixedtypevaluefield,
+ id,
+ #type{tag=[?TAG_PRIMITIVE(?N_OBJECT_IDENTIFIER)],
+ def='OBJECT IDENTIFIER'},
+ 'UNIQUE',
+ 'MANDATORY'},
+ {typefield,'Type','MANDATORY'},
+ {fixedtypevaluefield,
+ property,
+ #type{tag=[?TAG_PRIMITIVE(?N_BIT_STRING)],
+ def={'BIT STRING',[]}},
+ undefined,
+ {'DEFAULT',
+ [0,1,0]}}],
+ syntax={'WITH SYNTAX',
+ [{typefieldreference,'Type'},
+ 'IDENTIFIED',
+ 'BY',
+ {valuefieldreference,id},
+ ['HAS',
+ 'PROPERTY',
+ {valuefieldreference,property}]]}}}].
new_reference_name(Name) ->
case get(asn1_reference) of
@@ -7037,8 +6123,9 @@ insert_once(S,Tab,Key) ->
skipped
end.
-check_fold(S, [H|T], Check) ->
- Type = asn1_db:dbget(S#state.mname, H),
+check_fold(S0, [H|T], Check) ->
+ Type = asn1_db:dbget(S0#state.mname, H),
+ S = S0#state{error_context=Type},
case Check(S, H, Type) of
ok ->
check_fold(S, T, Check);
@@ -7047,5 +6134,19 @@ check_fold(S, [H|T], Check) ->
end;
check_fold(_, [], Check) when is_function(Check, 3) -> [].
+error_value(Value) when is_integer(Value) -> Value;
+error_value(Value) when is_atom(Value) -> Value;
+error_value(#type{def=Value}) when is_atom(Value) -> Value;
+error_value(#type{def=Value}) -> error_value(Value);
+error_value(RefOrType) ->
+ try name_of_def(RefOrType) of
+ Name -> Name
+ catch _:_ ->
+ case get_datastr_name(RefOrType) of
+ undefined -> RefOrType;
+ Name -> Name
+ end
+ end.
+
name_of_def(#'Externaltypereference'{type=N}) -> N;
name_of_def(#'Externalvaluereference'{value=N}) -> N.
diff --git a/lib/asn1/src/asn1ct_constructed_ber_bin_v2.erl b/lib/asn1/src/asn1ct_constructed_ber_bin_v2.erl
index 5fadd0495a..820d19b85c 100644
--- a/lib/asn1/src/asn1ct_constructed_ber_bin_v2.erl
+++ b/lib/asn1/src/asn1ct_constructed_ber_bin_v2.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2002-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2002-2014. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -234,7 +234,7 @@ gen_decode_sequence(Erules,Typename,D) when is_record(D,type) ->
asn1ct_name:new(rb),
emit([" {'",RecordName,"'}.",nl,nl]);
{LeadingAttrTerm,PostponedDecArgs} ->
- emit([com,nl,nl]),
+ emit([nl]),
case {LeadingAttrTerm,PostponedDecArgs} of
{[],[]} ->
ok;
@@ -413,7 +413,7 @@ gen_decode_set(Erules,Typename,D) when is_record(D,type) ->
%% return value as record
emit([" {'",RecordName,"'}.",nl]);
{LeadingAttrTerm,PostponedDecArgs} ->
- emit([com,nl,nl]),
+ emit([nl]),
case {LeadingAttrTerm,PostponedDecArgs} of
{[],[]} ->
ok;
@@ -617,18 +617,20 @@ gen_dec_sequence_call1(Erules,TopType,[#'ComponentType'{name=Cname,typespec=Type
{LA,PostponedDec} =
gen_dec_component(Erules,TopType,Cname,Tags,Type,Num,Prop,
Ext,DecObjInf),
+ emit([com,nl]),
case Rest of
[] ->
{LA ++ LeadingAttrAcc,PostponedDec ++ ArgsAcc};
_ ->
- emit([com,nl]),
asn1ct_name:new(bytes),
gen_dec_sequence_call1(Erules,TopType,Rest,Num+1,Ext,DecObjInf,
LA++LeadingAttrAcc,PostponedDec++ArgsAcc)
end;
gen_dec_sequence_call1(_Erules,_TopType,[],1,_,_,_,_) ->
- no_terms.
+ no_terms;
+gen_dec_sequence_call1(_, _, [], _Num, _, _, LA, PostponedDec) ->
+ {LA, PostponedDec}.
gen_dec_sequence_call2(_Erules,_TopType, {[], [], []}, _Ext,_DecObjInf) ->
no_terms;
@@ -643,7 +645,6 @@ gen_dec_sequence_call2(Erules,TopType,{Root1,EList,Root2},_Ext,DecObjInf) ->
%% TagList is the tags of Root2 elements from the first up to and
%% including the first mandatory element.
TagList = get_root2_taglist(Root2,[]),
- emit({com,nl}),
emit([{curr,tlv}," = ",
{call,ber,skip_ExtensionAdditions,
[{prev,tlv},{asis,TagList}]},com,nl]),
diff --git a/lib/asn1/src/asn1ct_constructed_per.erl b/lib/asn1/src/asn1ct_constructed_per.erl
index a91404ed54..0bc6688a49 100644
--- a/lib/asn1/src/asn1ct_constructed_per.erl
+++ b/lib/asn1/src/asn1ct_constructed_per.erl
@@ -410,12 +410,11 @@ gen_dec_open_type(Erule, Val, {Xmod,Xtype}, LeadingAttr,
#classdef{typespec=ClassDef} = asn1_db:dbget(ClMod, ClType),
#objectclass{fields=ClassFields} = ClassDef,
Extensible = lists:member('EXTENSIONMARK', ObjSet1),
- ObjSet2 = [{Key,fix_object_code(Name, Code, ClassFields)} ||
- {_,Key,Code} <- ObjSet1],
- ObjSet = lists:sort([P || {_,B}=P <- ObjSet2, B =/= none]),
+ Typename = [Name,ClType],
+ ObjSet = index_object_set(Erule, ClType, Name,
+ ObjSet1, ClassFields),
Key = erlang:md5(term_to_binary({decode,ObjSet,RestFieldNames,
Prop,Extensible})),
- Typename = [Name,ClType],
Gen = fun(_Fd, N) ->
dec_objset_optional(N, Prop),
dec_objset(Erule, N, ObjSet, RestFieldNames, Typename),
@@ -467,46 +466,15 @@ dec_objset_2(Erule, Obj, RestFields0, Typename) ->
Imm = asn1ct_gen_per:gen_dec_imm(Erule, Type),
{Term,_} = asn1ct_imm:dec_slim_cg(Imm, 'Bytes'),
emit([com,nl,Term]);
- #typedef{name={constructed,bif},typespec=Def} ->
- InnerType = asn1ct_gen:get_inner(Def#type.def),
- case InnerType of
- 'CHOICE' ->
- asn1ct_name:start(),
- asn1ct_name:new(bytes),
- {'CHOICE',CompList} = Def#type.def,
- Ext = extensible_enc(CompList),
- emit(["{Result,_} = begin",nl]),
- gen_dec_choice(Erule, Typename, CompList, Ext),
- emit([nl,
- "end",com,nl,
- "Result"]);
- 'SET' ->
- Imm0 = gen_dec_constructed_imm(Erule, Typename, Def),
- Imm = opt_imm(Imm0),
- asn1ct_name:start(),
- emit(["{Result,_} = begin",nl]),
- emit_gen_dec_imm(Imm),
- emit([nl,
- "end",com,nl,
- "Result"]);
- 'SET OF' ->
- asn1ct_name:start(),
- do_gen_decode_sof(Erule, Typename, 'SET OF',
- Def, false);
- 'SEQUENCE' ->
- Imm0 = gen_dec_constructed_imm(Erule, Typename, Def),
- Imm = opt_imm(Imm0),
- asn1ct_name:start(),
- emit(["{Result,_} = begin",nl]),
- emit_gen_dec_imm(Imm),
- emit([nl,
- "end",com,nl,
- "Result"]);
- 'SEQUENCE OF' ->
- asn1ct_name:start(),
- do_gen_decode_sof(Erule, Typename, 'SEQUENCE OF',
- Def, false)
- end;
+ #typedef{name={constructed,bif},typespec=Type}=Def ->
+ Prefix = "dec_outlined_",
+ Key = {dec_outlined,Def},
+ Gen = fun(_Fd, Name) ->
+ gen_dec_obj(Erule, Name, Typename, Type)
+ end,
+ Func = asn1ct_func:call_gen(Prefix, Key, Gen),
+ emit(["{Term,_} = ",{asis,Func},"(Bytes)",com,nl,
+ "Term"]);
#typedef{name=Type} ->
emit(["{Result,_} = ",{asis,enc_func("dec_", Type)},"(Bytes),",nl,
"Result"]);
@@ -531,6 +499,12 @@ dec_objset_2(Erule, Obj, RestFields0, Typename) ->
end
end.
+gen_dec_obj(Erules, Name, Typename, Type) ->
+ emit([{asis,Name},"(Bytes) ->",nl]),
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ asn1ct_gen:gen_decode_constructed(Erules, Typename,
+ InnerType, Type).
+
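The per-inner-type special casing is replaced by "outlining": asn1ct_func:call_gen/3 is handed a prefix, a key, and a generator fun, emits the helper function once per distinct key, and returns the helper's name so the caller only has to emit a call. This is not the real implementation, only a sketch of the generate-once idea behind it (the process dictionary stands in for asn1ct_func's bookkeeping):

    -module(gen_once_demo).
    -export([call_gen/3]).

    call_gen(Prefix, Key, GenBody) ->
        Tab = case get(gen_once_tab) of
                  undefined -> #{};
                  T -> T
              end,
        case Tab of
            #{Key := Name} ->
                Name;                                  % already emitted, reuse it
            #{} ->
                Name = list_to_atom(Prefix ++ integer_to_list(map_size(Tab) + 1)),
                GenBody(Name),                         % emit the helper exactly once
                put(gen_once_tab, Tab#{Key => Name}),
                Name
        end.

    %% 1> gen_once_demo:call_gen("dec_outlined_", k1,
    %%        fun(N) -> io:format("~p(Bytes) -> ...~n", [N]) end).
    %% dec_outlined_1(Bytes) -> ...
    %% dec_outlined_1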
gen_encode_choice(Erule, TopType, D) ->
asn1ct_name:start(),
Imm = gen_encode_choice_imm(Erule, TopType, D),
@@ -595,10 +569,10 @@ gen_encode_sof_imm(Erule, Typename, SeqOrSetOf, #type{}=D) ->
gen_decode_sof(Erules, Typename, SeqOrSetOf, #type{}=D) ->
asn1ct_name:start(),
- do_gen_decode_sof(Erules, Typename, SeqOrSetOf, D, true),
+ do_gen_decode_sof(Erules, Typename, SeqOrSetOf, D),
emit([".",nl,nl]).
-do_gen_decode_sof(Erules, Typename, SeqOrSetOf, D, NeedRest) ->
+do_gen_decode_sof(Erules, Typename, SeqOrSetOf, D) ->
{_SeqOrSetOf,ComponentType} = D#type.def,
SizeConstraint = asn1ct_imm:effective_constraint(bitstring,
D#type.constraint),
@@ -610,12 +584,11 @@ do_gen_decode_sof(Erules, Typename, SeqOrSetOf, D, NeedRest) ->
""
end,
{Num,Buf} = gen_decode_length(SizeConstraint, Erules),
- Key = erlang:md5(term_to_binary({Typename,SeqOrSetOf,
- ComponentType,NeedRest})),
+ Key = erlang:md5(term_to_binary({Typename,SeqOrSetOf,ComponentType})),
Gen = fun(_Fd, Name) ->
gen_decode_sof_components(Erules, Name,
Typename, SeqOrSetOf,
- ComponentType, NeedRest)
+ ComponentType)
end,
F = asn1ct_func:call_gen("dec_components", Key, Gen),
emit([",",nl,
@@ -629,7 +602,7 @@ gen_decode_length(Constraint, Erule) ->
Imm = asn1ct_imm:per_dec_length(Constraint, true, is_aligned(Erule)),
asn1ct_imm:dec_slim_cg(Imm, "Bytes").
-gen_decode_sof_components(Erule, Name, Typename, SeqOrSetOf, Cont, NeedRest) ->
+gen_decode_sof_components(Erule, Name, Typename, SeqOrSetOf, Cont) ->
{ObjFun,ObjFun_Var} =
case Cont#type.tablecinf of
[{objfun,_}|_R] ->
@@ -637,14 +610,8 @@ gen_decode_sof_components(Erule, Name, Typename, SeqOrSetOf, Cont, NeedRest) ->
_ ->
{"",""}
end,
- case NeedRest of
- false ->
- emit([{asis,Name},"(0, _Bytes",ObjFun_Var,", Acc) ->",nl,
- "lists:reverse(Acc);",nl]);
- true ->
- emit([{asis,Name},"(0, Bytes",ObjFun_Var,", Acc) ->",nl,
- "{lists:reverse(Acc),Bytes};",nl])
- end,
+ emit([{asis,Name},"(0, Bytes",ObjFun_Var,", Acc) ->",nl,
+ "{lists:reverse(Acc),Bytes};",nl]),
emit([{asis,Name},"(Num, Bytes",ObjFun,", Acc) ->",nl,
"{Term,Remain} = "]),
Constructed_Suffix = asn1ct_gen:constructed_suffix(SeqOrSetOf,
@@ -1024,11 +991,12 @@ enc_var_type_call(Erule, Name, RestFieldNames,
#classdef{typespec=ClassDef} = asn1_db:dbget(ClMod, ClType),
#objectclass{fields=ClassFields} = ClassDef,
Extensible = lists:member('EXTENSIONMARK', ObjSet1),
- ObjSet2 = [{Key,fix_object_code(Name, Code, ClassFields)} ||
- {_,Key,Code} <- ObjSet1],
- ObjSet = lists:sort([P || {_,B}=P <- ObjSet2, B =/= none]),
+ ObjSet = index_object_set(Erule, ClType, Name,
+ ObjSet1, ClassFields),
Key = erlang:md5(term_to_binary({encode,ObjSet,RestFieldNames,Extensible})),
- Imm = enc_objset_imm(Erule, Name, ObjSet, RestFieldNames, Extensible),
+ TypeName = [ClType,Name],
+ Imm = enc_objset_imm(Erule, TypeName, Name, ObjSet,
+ RestFieldNames, Extensible),
Lambda = {lambda,[{var,"Val"},{var,"Id"}],Imm},
Gen = fun(_Fd, N) ->
Aligned = is_aligned(Erule),
@@ -1039,11 +1007,27 @@ enc_var_type_call(Erule, Name, RestFieldNames,
Prefix = lists:concat(["enc_os_",Name]),
[{call_gen,Prefix,Key,Gen,Lambda,[Val,Fun]}].
-fix_object_code(Name, [{Name,B}|_], _ClassFields) ->
- B;
-fix_object_code(Name, [_|T], ClassFields) ->
- fix_object_code(Name, T, ClassFields);
-fix_object_code(Name, [], ClassFields) ->
+index_object_set(_Erules, _ClType, Name, Set0, ClassFields) ->
+ Set = index_object_set_1(Name, Set0, ClassFields),
+ lists:sort(Set).
+
+index_object_set_1(Name, [{_,Key,Code}|T], ClassFields) ->
+ case index_object_set_2(Name, Code, ClassFields) of
+ none ->
+ index_object_set_1(Name, T, ClassFields);
+ Type ->
+ [{Key,Type}|index_object_set_1(Name, T, ClassFields)]
+ end;
+index_object_set_1(Name, [_|T], ClassFields) ->
+ index_object_set_1(Name, T, ClassFields);
+index_object_set_1(_, [], _) ->
+ [].
+
+index_object_set_2(Name, [{Name,Type}|_], _ClassFields) ->
+ Type;
+index_object_set_2(Name, [_|T], ClassFields) ->
+ index_object_set_2(Name, T, ClassFields);
+index_object_set_2(Name, [], ClassFields) ->
case lists:keyfind(Name, 2, ClassFields) of
{typefield,Name,'OPTIONAL'} ->
none;
@@ -1059,7 +1043,8 @@ fix_object_code(Name, [], ClassFields) ->
end
end.
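index_object_set/5 replaces fix_object_code/3: it walks the object set once, drops entries whose code resolves to none, and returns a sorted {Key,Type} list for the generators to match on. The filter-and-sort pattern in isolation (the lookup fun stands in for the class-field resolution):

    -module(index_demo).
    -export([index/2]).

    index(Entries, Lookup) ->
        lists:sort([{Key,Val} || {Key,Code} <- Entries,
                                 Val <- [Lookup(Code)],
                                 Val =/= none]).

    %% 1> index_demo:index([{2,b},{1,skip},{3,a}],
    %%                     fun(skip) -> none; (C) -> C end).
    %% [{2,b},{3,a}]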
-enc_objset_imm(Erule, Component, ObjSet, RestFieldNames, Extensible) ->
+enc_objset_imm(Erule, TypeName, Component, ObjSet,
+ RestFieldNames, Extensible) ->
Aligned = is_aligned(Erule),
E = {error,
fun() ->
@@ -1070,7 +1055,7 @@ enc_objset_imm(Erule, Component, ObjSet, RestFieldNames, Extensible) ->
end},
[{'cond',
[[{eq,{var,"Id"},Key}|
- enc_obj(Erule, Obj, RestFieldNames, Aligned)] ||
+ enc_obj(Erule, Obj, TypeName, RestFieldNames, Aligned)] ||
{Key,Obj} <- ObjSet] ++
[['_',case Extensible of
false ->
@@ -1086,24 +1071,18 @@ enc_objset_imm(Erule, Component, ObjSet, RestFieldNames, Extensible) ->
end
end]]}].
-enc_obj(Erule, Obj, RestFieldNames0, Aligned) ->
+enc_obj(Erule, Obj, TypeName, RestFieldNames0, Aligned) ->
+ Val = {var,"Val"},
case Obj of
+ #typedef{name={constructed,bif},typespec=Type}=Def ->
+ Prefix = "enc_outlined_",
+ Key = {enc_outlined,Def},
+ Gen = fun(_Fd, Name) ->
+ gen_enc_obj(Erule, Name, TypeName, Type)
+ end,
+ [{call_gen,Prefix,Key,Gen,undefined,[Val]}];
#typedef{name={primitive,bif},typespec=Def} ->
asn1ct_gen_per:gen_encode_prim_imm({var,"Val"}, Def, Aligned);
- #typedef{name={constructed,bif},typespec=Def} ->
- InnerType = asn1ct_gen:get_inner(Def#type.def),
- case InnerType of
- 'CHOICE' ->
- gen_encode_choice_imm(Erule, name, Def);
- 'SET' ->
- gen_encode_constructed_imm(Erule, name, Def);
- 'SET OF' ->
- gen_encode_sof_imm(Erule, name, InnerType, Def);
- 'SEQUENCE' ->
- gen_encode_constructed_imm(Erule, name, Def);
- 'SEQUENCE OF' ->
- gen_encode_sof_imm(Erule, name, InnerType, Def)
- end;
#typedef{name=Type} ->
[{apply,{local,enc_func(Type),Type},[{var,"Val"}]}];
#'Externalvaluereference'{module=Mod,value=Value} ->
@@ -1112,7 +1091,8 @@ enc_obj(Erule, Obj, RestFieldNames0, Aligned) ->
{object,_,Fields} = Def,
[NextField|RestFieldNames] = RestFieldNames0,
{NextField,Typedef} = lists:keyfind(NextField, 1, Fields),
- enc_obj(Erule, Typedef, RestFieldNames, Aligned)
+ enc_obj(Erule, Typedef, TypeName,
+ RestFieldNames, Aligned)
end;
#'Externaltypereference'{module=Mod,type=Type} ->
Func = enc_func(Type),
@@ -1124,6 +1104,11 @@ enc_obj(Erule, Obj, RestFieldNames0, Aligned) ->
end
end.
+gen_enc_obj(Erules, Name, Typename, Type) ->
+ emit([{asis,Name},"(Val) ->",nl]),
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ asn1ct_gen:gen_encode_constructed(Erules, Typename,
+ InnerType, Type).
gen_dec_components_call(Erule, TopType, {Root,ExtList},
DecInfObj, Ext, NumberOfOptionals) ->
diff --git a/lib/asn1/src/asn1ct_gen.erl b/lib/asn1/src/asn1ct_gen.erl
index 450d309688..d3c1f34821 100644
--- a/lib/asn1/src/asn1ct_gen.erl
+++ b/lib/asn1/src/asn1ct_gen.erl
@@ -531,34 +531,30 @@ gen_part_decode_funcs({primitive,bif},_TypeName,
gen_part_decode_funcs(WhatKind,_TypeName,{_,Directive,_,_}) ->
throw({error,{asn1,{"Not implemented yet",WhatKind," partial incomplete directive:",Directive}}}).
-
-gen_types(Erules,Tname,{RootL1,ExtList,RootL2})
+%% EncDec = 'gen_encode' | 'gen_decode'
+gen_types(Erules, Tname, {RootL1,ExtList,RootL2}, EncDec)
when is_list(RootL1), is_list(RootL2) ->
- gen_types(Erules,Tname,RootL1),
- Rtmod = ct_gen_module(Erules),
- gen_types(Erules,Tname,Rtmod:extaddgroup2sequence(ExtList)),
- gen_types(Erules,Tname,RootL2);
-gen_types(Erules,Tname,{RootList,ExtList}) when is_list(RootList) ->
- gen_types(Erules,Tname,RootList),
+ gen_types(Erules, Tname, RootL1, EncDec),
Rtmod = ct_gen_module(Erules),
- gen_types(Erules,Tname,Rtmod:extaddgroup2sequence(ExtList));
-gen_types(Erules,Tname,[{'EXTENSIONMARK',_,_}|Rest]) ->
- gen_types(Erules,Tname,Rest);
-gen_types(Erules,Tname,[ComponentType|Rest]) ->
+ gen_types(Erules, Tname, Rtmod:extaddgroup2sequence(ExtList), EncDec),
+ gen_types(Erules, Tname, RootL2, EncDec);
+gen_types(Erules, Tname, {RootList,ExtList}, EncDec) when is_list(RootList) ->
+ gen_types(Erules, Tname, RootList, EncDec),
Rtmod = ct_gen_module(Erules),
+ gen_types(Erules, Tname, Rtmod:extaddgroup2sequence(ExtList), EncDec);
+gen_types(Erules, Tname, [{'EXTENSIONMARK',_,_}|T], EncDec) ->
+ gen_types(Erules, Tname, T, EncDec);
+gen_types(Erules, Tname, [ComponentType|T], EncDec) ->
asn1ct_name:clear(),
- Rtmod:gen_encode(Erules,Tname,ComponentType),
- asn1ct_name:clear(),
- Rtmod:gen_decode(Erules,Tname,ComponentType),
- gen_types(Erules,Tname,Rest);
-gen_types(_,_,[]) ->
- true;
-gen_types(Erules,Tname,Type) when is_record(Type,type) ->
Rtmod = ct_gen_module(Erules),
+ Rtmod:EncDec(Erules, Tname, ComponentType),
+ gen_types(Erules, Tname, T, EncDec);
+gen_types(_, _, [], _) ->
+ ok;
+gen_types(Erules, Tname, #type{}=Type, EncDec) ->
asn1ct_name:clear(),
- Rtmod:gen_encode(Erules,Tname,Type),
- asn1ct_name:clear(),
- Rtmod:gen_decode(Erules,Tname,Type).
+ Rtmod = ct_gen_module(Erules),
+ Rtmod:EncDec(Erules, Tname, Type).
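gen_types now takes the generator to run (gen_encode or gen_decode) as an extra argument and invokes it with Rtmod:EncDec(...); in Erlang both the module and the function name in a remote call may be variables bound to atoms. A tiny standalone illustration of that dynamic dispatch:

    -module(dispatch_demo).
    -export([run/2]).

    run(Mod, FunName) when is_atom(Mod), is_atom(FunName) ->
        Mod:FunName(hello).

    %% 1> dispatch_demo:run(erlang, atom_to_list).
    %% "hello"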
%% VARIOUS GENERATOR STUFF
%% *************************************************
@@ -599,25 +595,25 @@ gen_encode_constructed(Erules,Typename,InnerType,D) when is_record(D,type) ->
'SET' ->
Rtmod:gen_encode_set(Erules,Typename,D),
#'SET'{components=Components} = D#type.def,
- gen_types(Erules,Typename,Components);
+ gen_types(Erules, Typename, Components, gen_encode);
'SEQUENCE' ->
Rtmod:gen_encode_sequence(Erules,Typename,D),
#'SEQUENCE'{components=Components} = D#type.def,
- gen_types(Erules,Typename,Components);
+ gen_types(Erules, Typename, Components, gen_encode);
'CHOICE' ->
Rtmod:gen_encode_choice(Erules,Typename,D),
{_,Components} = D#type.def,
- gen_types(Erules,Typename,Components);
+ gen_types(Erules, Typename, Components, gen_encode);
'SEQUENCE OF' ->
Rtmod:gen_encode_sof(Erules,Typename,InnerType,D),
{_,Type} = D#type.def,
NameSuffix = asn1ct_gen:constructed_suffix(InnerType,Type#type.def),
- gen_types(Erules,[NameSuffix|Typename],Type);
+ gen_types(Erules, [NameSuffix|Typename], Type, gen_encode);
'SET OF' ->
Rtmod:gen_encode_sof(Erules,Typename,InnerType,D),
{_,Type} = D#type.def,
NameSuffix = asn1ct_gen:constructed_suffix(InnerType,Type#type.def),
- gen_types(Erules,[NameSuffix|Typename],Type);
+ gen_types(Erules, [NameSuffix|Typename], Type, gen_encode);
_ ->
exit({nyi,InnerType})
end;
@@ -630,20 +626,29 @@ gen_decode_constructed(Erules,Typename,InnerType,D) when is_record(D,type) ->
asn1ct:step_in_constructed(), %% updates namelist for exclusive decode
case InnerType of
'SET' ->
- Rtmod:gen_decode_set(Erules,Typename,D);
+ Rtmod:gen_decode_set(Erules,Typename,D),
+ #'SET'{components=Components} = D#type.def,
+ gen_types(Erules, Typename, Components, gen_decode);
'SEQUENCE' ->
- Rtmod:gen_decode_sequence(Erules,Typename,D);
+ Rtmod:gen_decode_sequence(Erules,Typename,D),
+ #'SEQUENCE'{components=Components} = D#type.def,
+ gen_types(Erules, Typename, Components, gen_decode);
'CHOICE' ->
- Rtmod:gen_decode_choice(Erules,Typename,D);
+ Rtmod:gen_decode_choice(Erules,Typename,D),
+ {_,Components} = D#type.def,
+ gen_types(Erules, Typename, Components, gen_decode);
'SEQUENCE OF' ->
- Rtmod:gen_decode_sof(Erules,Typename,InnerType,D);
+ Rtmod:gen_decode_sof(Erules,Typename,InnerType,D),
+ {_,#type{def=Def}=Type} = D#type.def,
+ NameSuffix = asn1ct_gen:constructed_suffix(InnerType, Def),
+ gen_types(Erules, [NameSuffix|Typename], Type, gen_decode);
'SET OF' ->
- Rtmod:gen_decode_sof(Erules,Typename,InnerType,D);
- _ ->
- exit({nyi,InnerType})
+ Rtmod:gen_decode_sof(Erules,Typename,InnerType,D),
+ {_,#type{def=Def}=Type} = D#type.def,
+ NameSuffix = asn1ct_gen:constructed_suffix(InnerType, Def),
+ gen_types(Erules, [NameSuffix|Typename], Type, gen_decode)
end;
-
gen_decode_constructed(Erules,Typename,InnerType,D) when is_record(D,typedef) ->
gen_decode_constructed(Erules,Typename,InnerType,D#typedef.typespec).
diff --git a/lib/asn1/src/asn1ct_gen_ber_bin_v2.erl b/lib/asn1/src/asn1ct_gen_ber_bin_v2.erl
index e51b0898be..37413298a7 100644
--- a/lib/asn1/src/asn1ct_gen_ber_bin_v2.erl
+++ b/lib/asn1/src/asn1ct_gen_ber_bin_v2.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2002-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2002-2014. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -24,7 +24,7 @@
-include("asn1_records.hrl").
--export([decode_class/1, decode_type/1]).
+-export([decode_class/1]).
-export([gen_encode/2,gen_encode/3,gen_decode/2,gen_decode/3]).
-export([gen_encode_prim/4]).
-export([gen_dec_prim/3]).
@@ -278,8 +278,7 @@ emit_enc_enumerated_cases(L, Tags) ->
emit_enc_enumerated_cases(L, Tags, noext).
emit_enc_enumerated_cases([{EnumName,EnumVal}|T], Tags, Ext) ->
- Bytes = encode_pos_integer(EnumVal, []),
- Len = length(Bytes),
+ {Bytes,Len} = encode_integer(EnumVal),
emit([{asis,EnumName}," -> ",
{call,ber,encode_tags,[Tags,{asis,Bytes},Len]},";",nl]),
emit_enc_enumerated_cases(T, Tags, Ext);
@@ -288,10 +287,25 @@ emit_enc_enumerated_cases([], _Tags, _Ext) ->
emit([{curr,enumval}," -> exit({error,{asn1, {enumerated_not_in_range,",{curr, enumval},"}}})"]),
emit([nl,"end"]).
-encode_pos_integer(0, [B|_Acc] = L) when B < 128 ->
+encode_integer(Val) ->
+ Bytes =
+ if
+ Val >= 0 ->
+ encode_integer_pos(Val, []);
+ true ->
+ encode_integer_neg(Val, [])
+ end,
+ {Bytes,length(Bytes)}.
+
+encode_integer_pos(0, [B|_Acc]=L) when B < 128 ->
L;
-encode_pos_integer(N, Acc) ->
- encode_pos_integer(N bsr 8, [N band 255|Acc]).
+encode_integer_pos(N, Acc) ->
+ encode_integer_pos((N bsr 8), [N band 16#ff| Acc]).
+
+encode_integer_neg(-1, [B1|_T]=L) when B1 > 127 ->
+ L;
+encode_integer_neg(N, Acc) ->
+ encode_integer_neg(N bsr 8, [N band 16#ff|Acc]).
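encode_integer/1 now handles negative enumeration values as well, producing the shortest big-endian two's-complement octet list: a positive value keeps a leading byte below 128, a negative one a leading byte above 127. A standalone sketch of the same encoding that can be tried in the shell:

    -module(int_octets_demo).
    -export([encode/1]).

    encode(Val) when Val >= 0 -> pos(Val, []);
    encode(Val) -> neg(Val, []).

    pos(0, [B|_]=Acc) when B < 128 -> Acc;
    pos(N, Acc) -> pos(N bsr 8, [N band 16#ff|Acc]).

    neg(-1, [B|_]=Acc) when B > 127 -> Acc;
    neg(N, Acc) -> neg(N bsr 8, [N band 16#ff|Acc]).

    %% 1> int_octets_demo:encode(256).
    %% [1,0]
    %% 2> int_octets_demo:encode(-129).
    %% [255,127]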
%%===============================================================================
%%===============================================================================
@@ -1179,23 +1193,25 @@ gen_objset_enc(_,_,{unique,undefined},_,_,_,_,_) ->
gen_objset_enc(Erules, ObjSetName, UniqueName,
[{ObjName,Val,Fields}|T], ClName, ClFields,
NthObj,Acc)->
- emit(["'getenc_",ObjSetName,"'(",{asis,Val},") ->",nl]),
CurrMod = get(currmod),
{InternalFunc,NewNthObj}=
case ObjName of
{no_mod,no_name} ->
- gen_inlined_enc_funs(Fields,ClFields,ObjSetName,NthObj);
+ gen_inlined_enc_funs(Fields, ClFields, ObjSetName, Val, NthObj);
{CurrMod,Name} ->
- emit({" fun 'enc_",Name,"'/3"}),
+ emit(["'getenc_",ObjSetName,"'(",{asis,Val},") ->",nl,
+ " fun 'enc_",Name,"'/3;",nl]),
{[],NthObj};
{ModuleName,Name} ->
+ emit(["'getenc_",ObjSetName,"'(",{asis,Val},") ->",nl]),
emit_ext_fun(enc,ModuleName,Name),
+ emit([";",nl]),
{[],NthObj};
_ ->
- emit({" fun 'enc_",ObjName,"'/3"}),
+ emit(["'getenc_",ObjSetName,"'(",{asis,Val},") ->",nl,
+ " fun 'enc_",ObjName,"'/3;",nl]),
{[],NthObj}
end,
- emit({";",nl}),
gen_objset_enc(Erules, ObjSetName, UniqueName, T, ClName, ClFields,
NewNthObj, InternalFunc ++ Acc);
%% See X.681 Annex E for the following case
@@ -1223,13 +1239,14 @@ emit_default_getenc(ObjSetName,UniqueName) ->
%% gen_inlined_enc_funs for each object iterates over all fields of a
%% class, and for each typefield it checks if the object has that
%% field and emits the proper code.
-gen_inlined_enc_funs(Fields, [{typefield,_,_}|_]=T, ObjSetName, NthObj) ->
- emit([indent(3),"fun(Type, Val, _RestPrimFieldName) ->",nl,
+gen_inlined_enc_funs(Fields, [{typefield,_,_}|_]=T, ObjSetName, Val, NthObj) ->
+ emit(["'getenc_",ObjSetName,"'(",{asis,Val},") ->",nl,
+ indent(3),"fun(Type, Val, _RestPrimFieldName) ->",nl,
indent(6),"case Type of",nl]),
gen_inlined_enc_funs1(Fields, T, ObjSetName, [], NthObj, []);
-gen_inlined_enc_funs(Fields,[_|Rest],ObjSetName,NthObj) ->
- gen_inlined_enc_funs(Fields,Rest,ObjSetName,NthObj);
-gen_inlined_enc_funs(_,[],_,NthObj) ->
+gen_inlined_enc_funs(Fields, [_|Rest], ObjSetName, Val, NthObj) ->
+ gen_inlined_enc_funs(Fields, Rest, ObjSetName, Val, NthObj);
+gen_inlined_enc_funs(_, [], _, _, NthObj) ->
{[],NthObj}.
gen_inlined_enc_funs1(Fields, [{typefield,Name,_}|Rest], ObjSetName,
@@ -1276,7 +1293,7 @@ gen_inlined_enc_funs1(Fields,[_|Rest], ObjSetName, Sep, NthObj, Acc)->
gen_inlined_enc_funs1(Fields, Rest, ObjSetName, Sep, NthObj, Acc);
gen_inlined_enc_funs1(_, [], _, _, NthObj, Acc) ->
emit([nl,indent(6),"end",nl,
- indent(3),"end"]),
+ indent(3),"end;",nl]),
{Acc,NthObj}.
emit_enc_open_type(I) ->
@@ -1358,23 +1375,25 @@ gen_objset_dec(_,_,{unique,undefined},_,_,_,_) ->
ok;
gen_objset_dec(Erules, ObjSName, UniqueName, [{ObjName,Val,Fields}|T],
ClName, ClFields, NthObj)->
- emit(["'getdec_",ObjSName,"'(",{asis,Val},") ->",nl]),
CurrMod = get(currmod),
NewNthObj=
case ObjName of
{no_mod,no_name} ->
- gen_inlined_dec_funs(Fields,ClFields,ObjSName,NthObj);
+ gen_inlined_dec_funs(Fields,ClFields,ObjSName,Val,NthObj);
{CurrMod,Name} ->
- emit([" fun 'dec_",Name,"'/3"]),
+ emit(["'getdec_",ObjSName,"'(",{asis,Val},") ->",nl,
+ " fun 'dec_",Name,"'/3;", nl]),
NthObj;
{ModuleName,Name} ->
+ emit(["'getdec_",ObjSName,"'(",{asis,Val},") ->",nl]),
emit_ext_fun(dec,ModuleName,Name),
+ emit([";",nl]),
NthObj;
_ ->
- emit([" fun 'dec_",ObjName,"'/3"]),
+ emit(["'getdec_",ObjSName,"'(",{asis,Val},") ->",nl,
+ " fun 'dec_",ObjName,"'/3;", nl]),
NthObj
end,
- emit([";",nl]),
gen_objset_dec(Erules, ObjSName, UniqueName, T, ClName,
ClFields, NewNthObj);
gen_objset_dec(_,ObjSetName,_UniqueName,['EXTENSIONMARK'],_ClName,
@@ -1394,10 +1413,15 @@ emit_default_getdec(ObjSetName,UniqueName) ->
emit(["'getdec_",ObjSetName,"'(ErrV) ->",nl]),
emit([indent(2), "fun(C,V,_) -> exit({{component,C},{value,V},{unique_name_and_value,",{asis,UniqueName},", ErrV}}) end"]).
-gen_inlined_dec_funs(Fields, ClFields, ObjSetName, NthObj) ->
+gen_inlined_dec_funs(Fields, [{typefield,_,_}|_]=ClFields, ObjSetName, Val, NthObj) ->
+ emit(["'getdec_",ObjSetName,"'(",{asis,Val},") ->",nl]),
emit([indent(3),"fun(Type, Bytes, _RestPrimFieldName) ->",nl,
indent(6),"case Type of",nl]),
- gen_inlined_dec_funs1(Fields, ClFields, ObjSetName, "", NthObj).
+ gen_inlined_dec_funs1(Fields, ClFields, ObjSetName, "", NthObj);
+gen_inlined_dec_funs(Fields, [_|ClFields], ObjSetName, Val, NthObj) ->
+ gen_inlined_dec_funs(Fields, ClFields, ObjSetName, Val, NthObj);
+gen_inlined_dec_funs(_, _, _, _,NthObj) ->
+ NthObj.
gen_inlined_dec_funs1(Fields, [{typefield,Name,Prop}|Rest],
ObjSetName, Sep0, NthObj) ->
@@ -1439,7 +1463,7 @@ gen_inlined_dec_funs1(Fields, [_|Rest], ObjSetName, Sep, NthObj)->
gen_inlined_dec_funs1(Fields, Rest, ObjSetName, Sep, NthObj);
gen_inlined_dec_funs1(_, [], _, _, NthObj) ->
emit([nl,indent(6),"end",nl,
- indent(3),"end"]),
+ indent(3),"end;",nl]),
NthObj.
emit_dec_open_type(I) ->
@@ -1534,39 +1558,6 @@ decode_class('CONTEXT') ->
decode_class('PRIVATE') ->
?PRIVATE.
-decode_type('BOOLEAN') -> 1;
-decode_type('INTEGER') -> 2;
-decode_type('BIT STRING') -> 3;
-decode_type('OCTET STRING') -> 4;
-decode_type('NULL') -> 5;
-decode_type('OBJECT IDENTIFIER') -> 6;
-decode_type('ObjectDescriptor') -> 7;
-decode_type('EXTERNAL') -> 8;
-decode_type('REAL') -> 9;
-decode_type('ENUMERATED') -> 10;
-decode_type('EMBEDDED_PDV') -> 11;
-decode_type('UTF8String') -> 12;
-decode_type('RELATIVE-OID') -> 13;
-decode_type('SEQUENCE') -> 16;
-decode_type('SEQUENCE OF') -> 16;
-decode_type('SET') -> 17;
-decode_type('SET OF') -> 17;
-decode_type('NumericString') -> 18;
-decode_type('PrintableString') -> 19;
-decode_type('TeletexString') -> 20;
-decode_type('T61String') -> 20;
-decode_type('VideotexString') -> 21;
-decode_type('IA5String') -> 22;
-decode_type('UTCTime') -> 23;
-decode_type('GeneralizedTime') -> 24;
-decode_type('GraphicString') -> 25;
-decode_type('VisibleString') -> 26;
-decode_type('GeneralString') -> 27;
-decode_type('UniversalString') -> 28;
-decode_type('BMPString') -> 30;
-decode_type('CHOICE') -> 'CHOICE'; % choice gets the tag from the actual alternative
-decode_type(Else) -> exit({error,{asn1,{unrecognized_type,Else}}}).
-
mkfuncname(#'Externaltypereference'{module=Mod,type=EType}, DecOrEnc) ->
CurrMod = get(currmod),
case CurrMod of
diff --git a/lib/asn1/src/asn1ct_imm.erl b/lib/asn1/src/asn1ct_imm.erl
index bdd14871d1..91820e08de 100644
--- a/lib/asn1/src/asn1ct_imm.erl
+++ b/lib/asn1/src/asn1ct_imm.erl
@@ -499,6 +499,8 @@ per_dec_enumerated_fix_list([], Tail, _) -> Tail.
per_dec_integer_1([{'SingleValue',Value}], _Aligned) ->
{value,Value};
+per_dec_integer_1([{'ValueRange',{'MIN',_}}], Aligned) ->
+ per_dec_unconstrained(Aligned);
per_dec_integer_1([{'ValueRange',{Lb,'MAX'}}], Aligned) when is_integer(Lb) ->
per_decode_semi_constrained(Lb, Aligned);
per_dec_integer_1([{'ValueRange',{Lb,Ub}}], Aligned) when is_integer(Lb),
@@ -1094,6 +1096,9 @@ per_enc_integer_1(Val0, [Constr], Aligned) ->
per_enc_integer_2(Val, {'SingleValue',Sv}, Aligned) when is_integer(Sv) ->
per_enc_constrained(Val, Sv, Sv, Aligned);
+per_enc_integer_2(Val, {'ValueRange',{'MIN',Ub}}, Aligned)
+ when is_integer(Ub) ->
+ {[],{lt,Val,Ub+1},per_enc_unconstrained(Val, Aligned)};
per_enc_integer_2(Val0, {'ValueRange',{Lb,'MAX'}}, Aligned)
when is_integer(Lb) ->
{Prefix,Val} = sub_lb(Val0, Lb),
@@ -1580,7 +1585,7 @@ do_combine_put_bits(_, _, _) ->
throw(impossible).
debit(Budget0, Alternatives) ->
- case Budget0 - log2(Alternatives) of
+ case Budget0 - math:log2(Alternatives) of
Budget when Budget > 0.0 ->
Budget;
_ ->
@@ -1593,8 +1598,6 @@ num_clauses([_|T], N) ->
num_clauses(T, N+1);
num_clauses([], N) -> N.
-log2(N) ->
- math:log(N) / math:log(2.0).
collect_put_bits(Imm) ->
lists:splitwith(fun({put_bits,V,_,_}) when is_integer(V) -> true;
diff --git a/lib/asn1/src/asn1ct_parser.yrl b/lib/asn1/src/asn1ct_parser.yrl
deleted file mode 100644
index 083162f191..0000000000
--- a/lib/asn1/src/asn1ct_parser.yrl
+++ /dev/null
@@ -1,1177 +0,0 @@
-%%<copyright>
-%% <year>1997-2008</year>
-%% <holder>Ericsson AB, All Rights Reserved</holder>
-%%</copyright>
-%%<legalnotice>
-%% The contents of this file are subject to the Erlang Public License,
-%% Version 1.1, (the "License"); you may not use this file except in
-%% compliance with the License. You should have received a copy of the
-%% Erlang Public License along with this software. If not, it can be
-%% retrieved online at http://www.erlang.org/.
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and limitations
-%% under the License.
-%%
-%% The Initial Developer of the Original Code is Ericsson AB.
-%%</legalnotice>
-%%
-Nonterminals
-ModuleDefinition ModuleIdentifier DefinitiveIdentifier DefinitiveObjIdComponentList
-DefinitiveObjIdComponent TagDefault ExtensionDefault
-ModuleBody Exports SymbolsExported Imports SymbolsImported
-SymbolsFromModuleList SymbolsFromModule GlobalModuleReference AssignedIdentifier SymbolList
-Symbol Reference AssignmentList Assignment
-ExtensionAndException
-ComponentTypeLists
-Externaltypereference Externalvaluereference DefinedType DefinedValue
-AbsoluteReference ItemSpec ItemId ComponentId TypeAssignment
-ValueAssignment
-% ValueSetTypeAssignment
-ValueSet
-Type BuiltinType NamedType ReferencedType
-Value ValueNotNull BuiltinValue ReferencedValue NamedValue
-% BooleanType
-BooleanValue IntegerType NamedNumberList NamedNumber SignedNumber
-% inlined IntegerValue
-EnumeratedType
-% inlined Enumerations
-Enumeration EnumerationItem
-% inlined EnumeratedValue
-% RealType
-RealValue NumericRealValue SpecialRealValue BitStringType
-% inlined BitStringValue
-IdentifierList
-% OctetStringType
-% inlined OctetStringValue
-% NullType NullValue
-SequenceType ComponentTypeList ComponentType
-% SequenceValue SequenceOfValue
-ComponentValueList SequenceOfType
-SAndSOfValue ValueList SetType
-% SetValue SetOfValue
-SetOfType
-ChoiceType
-% AlternativeTypeList made common with ComponentTypeList
-ChoiceValue
-AnyValue
-AnyDefBy
-SelectionType
-TaggedType Tag ClassNumber Class
-% redundant TaggedValue
-% EmbeddedPDVType EmbeddedPDVValue ExternalType ExternalValue ObjectIdentifierType
-ObjectIdentifierValue ObjIdComponentList ObjIdComponent
-% NameForm NumberForm NameAndNumberForm
-CharacterStringType
-RestrictedCharacterStringValue CharacterStringList
-% CharSyms CharsDefn
-Quadruple
-% Group Plane Row Cell
-Tuple
-% TableColumn TableRow
-% UnrestrictedCharacterString
-CharacterStringValue
-% UnrestrictedCharacterStringValue
-ConstrainedType Constraint ConstraintSpec TypeWithConstraint
-ElementSetSpecs ElementSetSpec
-%GeneralConstraint
-UserDefinedConstraint UserDefinedConstraintParameter
-UserDefinedConstraintParameters
-ExceptionSpec
-ExceptionIdentification
-Unions
-UnionMark
-UElems
-Intersections
-IntersectionElements
-IntersectionMark
-IElems
-Elements
-Elems
-SubTypeElements
-Exclusions
-LowerEndpoint
-UpperEndpoint
-LowerEndValue
-UpperEndValue
-TypeConstraints NamedConstraint PresenceConstraint
-
-ParameterizedTypeAssignment
-ParameterList
-Parameters
-Parameter
-ParameterizedType
-
-% X.681
-ObjectClassAssignment ObjectClass ObjectClassDefn
-FieldSpecs FieldSpec OptionalitySpec WithSyntaxSpec
-TokenOrGroupSpecs TokenOrGroupSpec
-SyntaxList OptionalGroup RequiredToken Word
-TypeOptionalitySpec
-ValueOrObjectOptSpec
-VSetOrOSetOptSpec
-ValueOptionalitySpec
-ObjectOptionalitySpec
-ValueSetOptionalitySpec
-ObjectSetOptionalitySpec
-% X.681 chapter 15
-InformationFromObjects
-ValueFromObject
-%ValueSetFromObjects
-TypeFromObject
-%ObjectFromObject
-%ObjectSetFromObjects
-ReferencedObjects
-FieldName
-PrimitiveFieldName
-
-ObjectAssignment
-ObjectSetAssignment
-ObjectSet
-ObjectSetElements
-Object
-ObjectDefn
-DefaultSyntax
-DefinedSyntax
-FieldSettings
-FieldSetting
-DefinedSyntaxTokens
-DefinedSyntaxToken
-Setting
-DefinedObject
-ObjectFromObject
-ObjectSetFromObjects
-ParameterizedObject
-ExternalObjectReference
-DefinedObjectSet
-DefinedObjectClass
-ExternalObjectClassReference
-
-% X.682
-TableConstraint
-ComponentRelationConstraint
-ComponentIdList
-
-% X.683
-ActualParameter
-.
-
-%UsefulType.
-
-Terminals
-'ABSENT' 'ABSTRACT-SYNTAX' 'ALL' 'ANY'
-'APPLICATION' 'AUTOMATIC' 'BEGIN' 'BIT'
-'BOOLEAN' 'BY' 'CHARACTER' 'CHOICE' 'CLASS' 'COMPONENT'
-'COMPONENTS' 'CONSTRAINED' 'DEFAULT' 'DEFINED' 'DEFINITIONS'
-'EMBEDDED' 'END' 'ENUMERATED' 'EXCEPT' 'EXPLICIT'
-'EXPORTS' 'EXTENSIBILITY' 'EXTERNAL' 'FALSE' 'FROM' 'GeneralizedTime'
-'TYPE-IDENTIFIER'
-'IDENTIFIER' 'IMPLICIT' 'IMPLIED' 'IMPORTS'
-'INCLUDES' 'INSTANCE' 'INTEGER' 'INTERSECTION'
-'MAX' 'MIN' 'MINUS-INFINITY' 'NULL'
-'OBJECT' 'ObjectDescriptor' 'OCTET' 'OF' 'OPTIONAL' 'PDV' 'PLUS-INFINITY'
-'PRESENT' 'PRIVATE' 'REAL' 'SEQUENCE' 'SET' 'SIZE'
-'STRING' 'SYNTAX' 'TAGS' 'TRUE' 'UNION'
-'UNIQUE' 'UNIVERSAL' 'UTCTime' 'WITH'
-'{' '}' '(' ')' '.' '::=' ';' ',' '@' '*' '-' '[' ']'
-'!' '..' '...' '|' '<' ':' '^'
-number identifier typereference restrictedcharacterstringtype
-bstring hstring cstring typefieldreference valuefieldreference
-objectclassreference word.
-
-Rootsymbol ModuleDefinition.
-Endsymbol '$end'.
-
-Left 300 'EXCEPT'.
-Left 200 '^'.
-Left 200 'INTERSECTION'.
-Left 100 '|'.
-Left 100 'UNION'.
-
-
-ModuleDefinition -> ModuleIdentifier
- 'DEFINITIONS'
- TagDefault
- ExtensionDefault
- '::='
- 'BEGIN'
- ModuleBody
- 'END' :
- {'ModuleBody',Ex,Im,Types} = '$7',
- {{typereference,Pos,Name},Defid} = '$1',
- #module{
- pos= Pos,
- name= Name,
- defid= Defid,
- tagdefault='$3',
- extensiondefault='$4',
- exports=Ex,
- imports=Im,
- typeorval=Types}.
-% {module, '$1','$3','$6'}.
-% Always results in a record of type module defined in asn1_records.hrl
-
-ModuleIdentifier -> typereference DefinitiveIdentifier :
- put(asn1_module,'$1'#typereference.val),
- {'$1','$2'}.
-
-DefinitiveIdentifier -> '{' DefinitiveObjIdComponentList '}' : '$2' .
-DefinitiveIdentifier -> '$empty': [].
-
-DefinitiveObjIdComponentList -> DefinitiveObjIdComponent : ['$1'].
-DefinitiveObjIdComponentList -> DefinitiveObjIdComponent DefinitiveObjIdComponentList : ['$1'|'$2'].
-
-DefinitiveObjIdComponent -> identifier : '$1' . %expanded->
-% DefinitiveObjIdComponent -> NameForm : '$1' .
-DefinitiveObjIdComponent -> number : '$1' . %expanded->
-% DefinitiveObjIdComponent -> DefinitiveNumberForm : 'fix' .
-DefinitiveObjIdComponent -> identifier '(' number ')' : {'$1','$3'} . %expanded->
-% DefinitiveObjIdComponent -> DefinitiveNameAndNumberForm : {'$1','$3'} .
-
-% DefinitiveNumberForm -> number : 'fix' .
-
-% DefinitiveNameAndNumberForm -> identifier '(' DefinitiveNumberForm ')' : 'fix' .
-
-TagDefault -> 'EXPLICIT' 'TAGS' : put(tagdefault,'EXPLICIT'),'EXPLICIT' .
-TagDefault -> 'IMPLICIT' 'TAGS' : put(tagdefault,'IMPLICIT'),'IMPLICIT' .
-TagDefault -> 'AUTOMATIC' 'TAGS' : put(tagdefault,'AUTOMATIC'),'AUTOMATIC' .
-TagDefault -> '$empty': put(tagdefault,'EXPLICIT'),'EXPLICIT'. % because this is the default
-
-ExtensionDefault -> 'EXTENSIBILITY' 'IMPLIED' : 'IMPLIED'.
-ExtensionDefault -> '$empty' : 'false'. % because this is the default
-
-ModuleBody -> Exports Imports AssignmentList : {'ModuleBody','$1','$2','$3'}.
-ModuleBody -> '$empty' : {'ModuleBody',nil,nil,[]}.
-
-Exports -> 'EXPORTS' SymbolList ';' : {exports,'$2'}.
-Exports -> 'EXPORTS' ';' : {exports,[]}.
-Exports -> '$empty' : {exports,all} .
-
-% inlined above SymbolsExported -> SymbolList : '$1'.
-% inlined above SymbolsExported -> '$empty' : [].
-
-Imports -> 'IMPORTS' SymbolsFromModuleList ';' : {imports,'$2'}.
-Imports -> 'IMPORTS' ';' : {imports,[]}.
-Imports -> '$empty' : {imports,[]} .
-
-% inlined above SymbolsImported -> SymbolsFromModuleList : '$1'.
-% inlined above SymbolsImported -> '$empty' : [].
-
-SymbolsFromModuleList -> SymbolsFromModule :['$1'].
-% SymbolsFromModuleList -> SymbolsFromModuleList SymbolsFromModule :$1.%changed
-SymbolsFromModuleList -> SymbolsFromModule SymbolsFromModuleList :['$1'|'$2'].
-
-% expanded SymbolsFromModule -> SymbolList 'FROM' GlobalModuleReference : #'SymbolsFromModule'{symbols = '$1',module='$3'}.
-SymbolsFromModule -> SymbolList 'FROM' typereference : #'SymbolsFromModule'{symbols = '$1',module='$3'}.
-SymbolsFromModule -> SymbolList 'FROM' typereference '{' ValueList '}': #'SymbolsFromModule'{symbols = '$1',module='$3'}.
-%SymbolsFromModule -> SymbolList 'FROM' typereference identifier: #'SymbolsFromModule'{symbols = '$1',module='$3'}.
-%SymbolsFromModule -> SymbolList 'FROM' typereference Externalvaluereference: #'SymbolsFromModule'{symbols = '$1',module='$3'}.
-%SymbolsFromModule -> SymbolList 'FROM' typereference DefinedValue: #'SymbolsFromModule'{symbols = '$1',module='$3'}.
-
-% inlined GlobalModuleReference -> typereference AssignedIdentifier : {'$1','$2'} .
-
-% inlined above AssignedIdentifier -> '{' ValueList '}' : '$2'.
-% replaced AssignedIdentifier -> '{' DefinedValue ObjIdComponentList '}' :{'$2','$3'}.
-% not necessary , replaced by SAndSOfValue AssignedIdentifier -> ObjectIdentifierValue :'$1'.
-% AssignedIdentifier -> DefinedValue : '$1'.
-% inlined AssignedIdentifier -> '$empty' : undefined.
-
-SymbolList -> Symbol : ['$1'].
-SymbolList -> Symbol ',' SymbolList :['$1'|'$3'].
-
-Symbol -> Reference :'$1'.
-% later Symbol -> ParameterizedReference :'$1'.
-
-Reference -> typereference :'$1'.
-Reference -> identifier:'$1'.
-Reference -> typereference '{' '}':'$1'.
-Reference -> Externaltypereference '{' '}':'$1'.
-
-% later Reference -> objectclassreference :'$1'.
-% later Reference -> objectreference :'$1'.
-% later Reference -> objectsetreference :'$1'.
-
-AssignmentList -> Assignment : ['$1'].
-% modified AssignmentList -> AssignmentList Assignment : '$1'.
-AssignmentList -> Assignment AssignmentList : ['$1'|'$2'].
-
-Assignment -> TypeAssignment : '$1'.
-Assignment -> ValueAssignment : '$1'.
-% later Assignment -> ValueSetTypeAssignment : '$1'.
-Assignment -> ObjectClassAssignment : '$1'.
-% later Assignment -> ObjectAssignment : '$1'.
-% combined with ValueAssignment Assignment -> ObjectAssignment : '$1'.
-Assignment -> ObjectSetAssignment : '$1'.
-Assignment -> ParameterizedTypeAssignment : '$1'.
-%Assignment -> ParameterizedValueAssignment : '$1'.
-%Assignment -> ParameterizedValueSetTypeAssignment : '$1'.
-%Assignment -> ParameterizedObjectClassAssignment : '$1'.
-
-ObjectClassAssignment -> typereference '::=' 'CLASS' '{' FieldSpecs '}' :
-%ObjectClassAssignment -> objectclassreference '::=' 'CLASS' '{' FieldSpecs '}' :
- #typedef{pos=element(2,'$1'),name=element(3,'$1'),typespec={'CLASS','$5',[]}}.
-ObjectClassAssignment -> typereference '::=' 'CLASS' '{' FieldSpecs '}' WithSyntaxSpec :
-%ObjectClassAssignment -> objectclassreference '::=' 'CLASS' '{' FieldSpecs '}' WithSyntaxSpec :
- #typedef{pos=element(2,'$1'),name=element(3,'$1'),typespec={'CLASS','$5','$7'}}.
-
-FieldSpecs -> FieldSpec : ['$1'].
-FieldSpecs -> FieldSpec ',' FieldSpecs : ['$1'|'$3'].
-
-FieldSpec -> typefieldreference TypeOptionalitySpec : {typefield,'$1','$2'}.
-
-FieldSpec -> valuefieldreference Type 'UNIQUE' ValueOrObjectOptSpec :
- {fixedtypevaluefield,'$1','$2','UNIQUE','$4'}.
-FieldSpec -> valuefieldreference Type ValueOrObjectOptSpec :
- {fixedtypevaluefield,'$1','$2',undefined,'$3'}.
-
-FieldSpec -> valuefieldreference typefieldreference ValueOrObjectOptSpec :
- {variabletypevaluefield, '$1','$2','$3'}.
-
-FieldSpec -> typefieldreference typefieldreference VSetOrOSetOptSpec :
- {variabletypevaluesetfield, '$1','$2','$3'}.
-
-FieldSpec -> typefieldreference Type VSetOrOSetOptSpec :
- {fixedtypevaluesetfield, '$1','$2','$3'}.
-
-TypeOptionalitySpec -> 'DEFAULT' Type : {'DEFAULT','$2'}.
-TypeOptionalitySpec -> 'OPTIONAL' : 'OPTIONAL'.
-TypeOptionalitySpec -> '$empty' : 'MANDATORY'.
-
-ValueOrObjectOptSpec -> ValueOptionalitySpec : '$1'.
-ValueOrObjectOptSpec -> ObjectOptionalitySpec : '$1'.
-ValueOrObjectOptSpec -> 'OPTIONAL' : 'OPTIONAL'.
-ValueOrObjectOptSpec -> '$empty' : 'MANDATORY'.
-
-ValueOptionalitySpec -> 'DEFAULT' Value :
- case '$2' of
- {identifier,_,Id} -> {'DEFAULT',Id};
- _ -> {'DEFAULT','$2'}
- end.
-
-%ObjectOptionalitySpec -> 'DEFAULT' Object :{'DEFAULT','$1'}.
-ObjectOptionalitySpec -> 'DEFAULT' '{' FieldSetting ',' FieldSettings '}' :
- {'DEFAULT',{object,['$2'|'$4']}}.
-ObjectOptionalitySpec -> 'DEFAULT' '{' FieldSetting '}' :
- {'DEFAULT',{object, ['$2']}}.
-%ObjectOptionalitySpec -> 'DEFAULT' '{' DefinedSyntaxTokens '}' :
-% {'DEFAULT',{object, '$2'}}.
-ObjectOptionalitySpec -> 'DEFAULT' ObjectFromObject :
- {'DEFAULT',{object, '$2'}}.
-
-
-VSetOrOSetOptSpec -> ValueSetOptionalitySpec : '$1'.
-%VSetOrOSetOptSpec -> ObjectSetOptionalitySpec : '$1'.
-VSetOrOSetOptSpec -> 'OPTIONAL' : 'OPTIONAL'.
-VSetOrOSetOptSpec -> '$empty' : 'MANDATORY'.
-
-ValueSetOptionalitySpec -> 'DEFAULT' ValueSet : {'DEFAULT','$1'}.
-
-%ObjectSetOptionalitySpec -> 'DEFAULT' ObjectSet : {'DEFAULT','$1'}.
-
-OptionalitySpec -> 'DEFAULT' Type : {'DEFAULT','$2'}.
-OptionalitySpec -> 'DEFAULT' ValueNotNull :
- case '$2' of
- {identifier,_,Id} -> {'DEFAULT',Id};
- _ -> {'DEFAULT','$2'}
- end.
-OptionalitySpec -> 'OPTIONAL' : 'OPTIONAL'.
-OptionalitySpec -> '$empty' : 'MANDATORY'.
-
-WithSyntaxSpec -> 'WITH' 'SYNTAX' SyntaxList : {'WITH SYNTAX','$3'}.
-
-SyntaxList -> '{' TokenOrGroupSpecs '}' : '$2'.
-SyntaxList -> '{' '}' : [].
-
-TokenOrGroupSpecs -> TokenOrGroupSpec : ['$1'].
-TokenOrGroupSpecs -> TokenOrGroupSpec TokenOrGroupSpecs : ['$1'|'$2'].
-
-TokenOrGroupSpec -> RequiredToken : '$1'.
-TokenOrGroupSpec -> OptionalGroup : '$1'.
-
-OptionalGroup -> '[' TokenOrGroupSpecs ']' : '$2'.
-
-RequiredToken -> typereference : '$1'.
-RequiredToken -> Word : '$1'.
-RequiredToken -> ',' : '$1'.
-RequiredToken -> PrimitiveFieldName : '$1'.
-
-Word -> 'BY' : 'BY'.
-
-ParameterizedTypeAssignment -> typereference ParameterList '::=' Type :
- #ptypedef{pos=element(2,'$1'),name=element(3,'$1'),
- args='$2', typespec='$4'}.
-
-ParameterList -> '{' Parameters '}':'$2'.
-
-Parameters -> Parameter: ['$1'].
-Parameters -> Parameter ',' Parameters: ['$1'|'$3'].
-
-Parameter -> typereference: '$1'.
-Parameter -> Value: '$1'.
-Parameter -> Type ':' typereference: {'$1','$3'}.
-Parameter -> Type ':' Value: {'$1','$3'}.
-Parameter -> '{' typereference '}': {objectset,'$2'}.
-
-
-% Externaltypereference -> modulereference '.' typereference : {'$1','$3'} .
-Externaltypereference -> typereference '.' typereference : #'Externaltypereference'{pos=element(2,'$1'),module=element(3,'$1'),type=element(3,'$3')}.
-
-% Externalvaluereference -> modulereference '.' valuereference : {'$1','$3'} .
-% inlined Externalvaluereference -> typereference '.' identifier : #'Externalvaluereference'{pos=element(2,'$1'),module=element(3,'$1'),value=element(3,'$3')}.
-
-
-DefinedType -> Externaltypereference : '$1' .
-DefinedType -> typereference :
- #'Externaltypereference'{pos='$1'#typereference.pos,
- module= get(asn1_module),
- type= '$1'#typereference.val} .
-DefinedType -> typereference ParameterList : {pt,'$1','$2'}.
-DefinedType -> Externaltypereference ParameterList : {pt,'$1','$2'}.
-
-% ActualParameterList -> '{' ActualParameters '}' : '$1'.
-
-% ActualParameters -> ActualParameter : ['$1'].
-% ActualParameters -> ActualParameter ',' ActualParameters : ['$1'|'$3'].
-
-ActualParameter -> Type : '$1'.
-ActualParameter -> ValueNotNull : '$1'.
-ActualParameter -> ValueSet : '$1'.
-% later DefinedType -> ParameterizedType : '$1' .
-% later DefinedType -> ParameterizedValueSetType : '$1' .
-
-% inlined DefinedValue -> Externalvaluereference :'$1'.
-% inlined DefinedValue -> identifier :'$1'.
-% later DefinedValue -> ParameterizedValue :'$1'.
-
-% not referenced yet AbsoluteReference -> '@' GlobalModuleReference '.' ItemSpec :{'$2','$4'}.
-
-% not referenced yet ItemSpec -> typereference :'$1'.
-% not referenced yet ItemSpec -> ItemId '.' ComponentId : {'$1','$3'}.
-
-% not referenced yet ItemId -> ItemSpec : '$1'.
-
-% not referenced yet ComponentId -> identifier :'$1'.
-% not referenced yet ComponentId -> number :'$1'.
-% not referenced yet ComponentId -> '*' :'$1'.
-
-TypeAssignment -> typereference '::=' Type :
- #typedef{pos=element(2,'$1'),name=element(3,'$1'),typespec='$3'}.
-
-ValueAssignment -> identifier Type '::=' Value :
- #valuedef{pos=element(2,'$1'),name=element(3,'$1'),type='$2',value='$4'}.
-
-% later ValueSetTypeAssignment -> typereference Type '::=' ValueSet :{'ValueSetTypeAssignment','$1','$2','$4'}.
-
-
-ValueSet -> '{' ElementSetSpec '}' : {valueset,'$2'}.
-
-% record(type,{tag,def,constraint}).
-Type -> BuiltinType :#type{def='$1'}.
-Type -> 'NULL' :#type{def='NULL'}.
-Type -> TaggedType:'$1'.
-Type -> ReferencedType:#type{def='$1'}. % change notag later
-Type -> ConstrainedType:'$1'.
-
-%ANY is here for compatibility with the old ASN.1 standard from 1988
-BuiltinType -> 'ANY' AnyDefBy:
- case '$2' of
- [] -> 'ANY';
- _ -> {'ANY DEFINED BY','$2'}
- end.
-BuiltinType -> BitStringType :'$1'.
-BuiltinType -> 'BOOLEAN' :element(1,'$1').
-BuiltinType -> CharacterStringType :'$1'.
-BuiltinType -> ChoiceType :'$1'.
-BuiltinType -> 'EMBEDDED' 'PDV' :'EMBEDDED PDV'.
-BuiltinType -> EnumeratedType :'$1'.
-BuiltinType -> 'EXTERNAL' :element(1,'$1').
-% later BuiltinType -> InstanceOfType :'$1'.
-BuiltinType -> IntegerType :'$1'.
-% BuiltinType -> 'NULL' :element(1,'$1').
-% later BuiltinType -> ObjectClassFieldType :'$1'.
-BuiltinType -> 'OBJECT' 'IDENTIFIER' :'OBJECT IDENTIFIER'.
-BuiltinType -> 'OCTET' 'STRING' :'OCTET STRING'.
-BuiltinType -> 'REAL' :element(1,'$1').
-BuiltinType -> SequenceType :'$1'.
-BuiltinType -> SequenceOfType :'$1'.
-BuiltinType -> SetType :'$1'.
-BuiltinType -> SetOfType :'$1'.
-% The so called Useful types
-BuiltinType -> 'GeneralizedTime': 'GeneralizedTime'.
-BuiltinType -> 'UTCTime' :'UTCTime'.
-BuiltinType -> 'ObjectDescriptor' : 'ObjectDescriptor'.
-
-% moved BuiltinType -> TaggedType :'$1'.
-
-
-AnyDefBy -> 'DEFINED' 'BY' identifier: '$3'.
-AnyDefBy -> '$empty': [].
-
-NamedType -> identifier Type :
-%{_,Pos,Val} = '$1',
-%{'NamedType',Pos,{Val,'$2'}}.
-V1 = '$1',
-{'NamedType',V1#identifier.pos,{V1#identifier.val,'$2'}}.
-NamedType -> SelectionType :'$1'.
-
-ReferencedType -> DefinedType : '$1'.
-% redundant ReferencedType -> UsefulType : 'fix'.
-ReferencedType -> SelectionType : '$1'.
-ReferencedType -> TypeFromObject : '$1'.
-% later ReferencedType -> ValueSetFromObjects : 'fix'.
-
-% too many conflicts Value -> AnyValue :'$1'.
-Value -> ValueNotNull : '$1'.
-Value -> 'NULL' :element(1,'$1').
-
-ValueNotNull -> BuiltinValue :'$1'.
-% inlined Value -> DefinedValue :'$1'. % DefinedValue , identifier
-% inlined Externalvaluereference -> Externalvaluereference :'$1'.
-ValueNotNull -> typereference '.' identifier :
- #'Externalvaluereference'{pos=element(2,'$1'),module=element(3,'$1'),
- value=element(3,'$3')}.
-ValueNotNull -> identifier :'$1'.
-
-
-%tmp Value -> NamedNumber: '$1'. % not a value but part of ObjIdC
-% redundant BuiltinValue -> BitStringValue :'$1'.
-BuiltinValue -> BooleanValue :'$1'.
-BuiltinValue -> CharacterStringValue :'$1'.
-BuiltinValue -> ChoiceValue :'$1'.
-% BuiltinValue -> EmbeddedPDVValue :'$1'. ==SequenceValue
-% BuiltinValue -> EnumeratedValue :'$1'. identifier
-% BuiltinValue -> ExternalValue :'$1'. ==SequenceValue
-% later BuiltinValue -> InstanceOfValue :'$1'.
-BuiltinValue -> SignedNumber :'$1'.
-% BuiltinValue -> 'NULL' :'$1'.
-% later BuiltinValue -> ObjectClassFieldValue :'$1'.
-% replaced by SAndSOfValue BuiltinValue -> ObjectIdentifierValue :'$1'.
-BuiltinValue -> bstring :element(3,'$1').
-BuiltinValue -> hstring :element(3,'$1').
-% conflict BuiltinValue -> RealValue :'$1'.
-BuiltinValue -> SAndSOfValue :'$1'.
-% replaced BuiltinValue -> SequenceOfValue :'$1'.
-% replaced BuiltinValue -> SequenceValue :'$1'.
-% replaced BuiltinValue -> SetValue :'$1'.
-% replaced BuiltinValue -> SetOfValue :'$1'.
-% conflict redundant BuiltinValue -> TaggedValue :'$1'.
-
-% inlined ReferencedValue -> DefinedValue:'$1'.
-% ReferencedValue -> Externalvaluereference:'$1'.
-% ReferencedValue -> identifier :'$1'.
-% later ReferencedValue -> ValueFromObject:'$1'.
-
-% inlined BooleanType -> BOOLEAN :'BOOLEAN'.
-
-% too many conflicts AnyValue -> Type ':' Value : {'ANYVALUE',{'$1','$3'}}.
-
-BooleanValue -> TRUE :true.
-BooleanValue -> FALSE :false.
-
-IntegerType -> 'INTEGER' : 'INTEGER'.
-IntegerType -> 'INTEGER' '{' NamedNumberList '}' : {'INTEGER','$3'}.
-
-NamedNumberList -> NamedNumber :['$1'].
-% modified NamedNumberList -> NamedNumberList ',' NamedNumber :'fix'.
-NamedNumberList -> NamedNumber ',' NamedNumberList :['$1'|'$3'].
-
-NamedNumber -> identifier '(' SignedNumber ')' : {'NamedNumber',element(3,'$1'),'$3'}.
-NamedNumber -> identifier '(' typereference '.' identifier ')' : {'NamedNumber',element(3,'$1'),{'ExternalValue',element(3,'$3'),element(3,'$5')}}.
-NamedNumber -> identifier '(' identifier ')' : {'NamedNumber',element(3,'$1'),element(3,'$3')}.
-
-%NamedValue -> identifier Value :
-% {'NamedValue',element(2,'$1'),element(3,'$1'),'$2'}.
-
-
-SignedNumber -> number : element(3,'$1').
-SignedNumber -> '-' number : - element(3,'$1').
-
-% inlined IntegerValue -> SignedNumber :'$1'.
-% conflict moved to Value IntegerValue -> identifier:'$1'.
-
-EnumeratedType -> ENUMERATED '{' Enumeration '}' :{'ENUMERATED','$3'}.
-
-% inlined Enumerations -> Enumeration :{'$1','false',[]}.
-% inlined Enumerations -> Enumeration ',' '...' : {'$1','true',[]}.
-% inlined Enumerations -> Enumeration ',' '...' ',' Enumeration : {'$1','true','$5'}.
-
-Enumeration -> EnumerationItem :['$1'].
-% modified Enumeration -> EnumerationItem ',' Enumeration :'fix'.
-Enumeration -> EnumerationItem ',' Enumeration :['$1'|'$3'].
-
-EnumerationItem -> identifier:element(3,'$1').
-EnumerationItem -> NamedNumber :'$1'.
-EnumerationItem -> '...' :'EXTENSIONMARK'.
-
-% conflict moved to Value EnumeratedValue -> identifier:'$1'.
-
-% inlined RealType -> REAL:'REAL'.
-
-RealValue -> NumericRealValue :'$1'.
-RealValue -> SpecialRealValue:'$1'.
-
-% ?? NumericRealValue -> number:'$1'. % number MUST BE '0'
-NumericRealValue -> SAndSOfValue : '$1'. % Value of the associated sequence type
-
-SpecialRealValue -> 'PLUS-INFINITY' :'$1'.
-SpecialRealValue -> 'MINUS-INFINITY' :'$1'.
-
-BitStringType -> 'BIT' 'STRING' :{'BIT STRING',[]}.
-BitStringType -> 'BIT' 'STRING' '{' NamedNumberList '}' :{'BIT STRING','$4'}.
-% NamedBitList replaced by NamedNumberList to reduce the grammar
-% Must check later that all "numbers" are positive
-
-% inlined BitStringValue -> bstring:'$1'.
-% inlined BitStringValue -> hstring:'$1'.
-% redundant use SequenceValue BitStringValue -> '{' IdentifierList '}' :$2.
-% redundant use SequenceValue BitStringValue -> '{' '}' :'fix'.
-
-IdentifierList -> identifier :[element(3,'$1')].
-% modified IdentifierList -> IdentifierList ',' identifier :'$1'.
-IdentifierList -> identifier ',' IdentifierList :[element(3,'$1')|'$3'].
-
-% inlined OctetStringType -> 'OCTET' 'STRING' :'OCTET STRING'.
-
-% inlined OctetStringValue -> bstring:'$1'.
-% inlined OctetStringValue -> hstring:'$1'.
-
-% inlined NullType -> 'NULL':'NULL'.
-
-% inlined NullValue -> NULL:'NULL'.
-
-% result is {'SEQUENCE',Optionals,Extensionmark,Componenttypelist}.
-SequenceType -> SEQUENCE '{' ComponentTypeList '}' :{'SEQUENCE','$3'}.
-% SequenceType -> SEQUENCE '{' ComponentTypeLists '}' :{'SEQUENCE','$3'}.
-% SequenceType -> SEQUENCE '{' ExtensionAndException '}' :{'SEQUENCE','$3'}.
-SequenceType -> SEQUENCE '{' '}' :{'SEQUENCE',[]}.
-
-% result is {RootComponentList,ExtensionAndException,AdditionalComponentTypeList}.
-%ComponentTypeLists -> ComponentTypeList ',' ExtensionAndException :{'$1','$3',[]}.
-%ComponentTypeLists -> ComponentTypeList :{'$1','false',[]}.
-%ComponentTypeLists -> ComponentTypeList ',' ExtensionAndException
-% ',' ComponentTypeList :{'$1','$3', '$5'}.
-%ComponentTypeLists -> ExtensionAndException ',' ComponentTypeList :{[],'$1','$3'}.
-
-ComponentTypeList -> ComponentType :['$1'].
-% modified below ComponentTypeList -> ComponentTypeList ',' ComponentType :'$1'.
-ComponentTypeList -> ComponentType ',' ComponentTypeList :['$1'|'$3'].
-
-% -record('ComponentType',{pos,name,type,attrib}).
-ComponentType -> '...' ExceptionSpec :{'EXTENSIONMARK',element(2,'$1'),'$2'}.
-ComponentType -> NamedType :
- {'NamedType',Pos,{Name,Type}} = '$1',
- #'ComponentType'{pos=Pos,name=Name,typespec=Type,prop=mandatory}.
-ComponentType -> NamedType 'OPTIONAL' :
- {'NamedType',Pos,{Name,Type}} = '$1',
- #'ComponentType'{pos=Pos,name=Name,typespec=Type,prop='OPTIONAL'}.
-ComponentType -> NamedType 'DEFAULT' Value:
- {'NamedType',Pos,{Name,Type}} = '$1',
- #'ComponentType'{pos=Pos,name=Name,typespec=Type,prop={'DEFAULT','$3'}}.
-ComponentType -> 'COMPONENTS' 'OF' Type :{'COMPONENTS OF','$3'}.
-
-% redundant ExtensionAndException -> '...' : extensionmark.
-% ExtensionAndException -> '...' ExceptionSpec : {extensionmark,'$2'}.
-
-% replaced SequenceValue -> '{' ComponentValueList '}':'$2'.
-% replaced SequenceValue -> '{' '}':[].
-
-ValueList -> Value :['$1'].
-ValueList -> NamedNumber :['$1'].
-% modified ValueList -> ValueList ',' Value :'$1'.
-ValueList -> Value ',' ValueList :['$1'|'$3'].
-ValueList -> Value ',' '...' :['$1' |[]].
-ValueList -> Value ValueList : ['$1',space|'$2'].
-ValueList -> NamedNumber ValueList: ['$1',space|'$2'].
-
-%ComponentValueList -> identifier ObjIdComponent:[{'NamedValue','$1','$2'}].
-%ComponentValueList -> NamedValue :['$1'].
-%ComponentValueList -> NamedValue ',' ComponentValueList:['$1'|'$3'].
-%ComponentValueList -> identifier ObjIdComponent ',' ComponentValueList :[{'NamedValue', '$1','$2'}|'$4'].
-
-SequenceOfType -> SEQUENCE OF Type : {'SEQUENCE OF','$3'}.
-
-% replaced SequenceOfValue with SAndSOfValue
-
-SAndSOfValue -> '{' ValueList '}' :'$2'.
-%SAndSOfValue -> '{' ComponentValueList '}' :'$2'.
-SAndSOfValue -> '{' '}' :[].
-
-% save for later SetType ->
-% result is {'SET',Optionals,Extensionmark,Componenttypelist}.
-SetType -> SET '{' ComponentTypeList '}' :{'SET','$3'}.
-% SetType -> SET '{' ExtensionAndException '}' :{'SET','$3'}.
-SetType -> SET '{' '}' :{'SET',[]}.
-
-% replaced SetValue with SAndSOfValue
-
-SetOfType -> SET OF Type : {'SET OF','$3'}.
-
-% replaced SetOfValue with SAndSOfValue
-
-ChoiceType -> 'CHOICE' '{' ComponentTypeList '}' :{'CHOICE','$3'}.
-% AlternativeTypeList is replaced by ComponentTypeList
-ChoiceValue -> identifier ':' Value : {'ChoiceValue',element(3,'$1'),'$3'}.
-% save for later SelectionType ->
-
-TaggedType -> Tag Type : '$2'#type{tag=['$1'#tag{type={default,get(tagdefault)}}]}.
-TaggedType -> Tag IMPLICIT Type :'$3'#type{tag=['$1'#tag{type='IMPLICIT'}]}.
-TaggedType -> Tag EXPLICIT Type :'$3'#type{tag=['$1'#tag{type='EXPLICIT'}]}.
-
-Tag -> '[' Class ClassNumber ']': #tag{class='$2',number='$3'}.
-Tag -> '[' Class typereference '.' identifier ']':
- #tag{class='$2',number=#'Externalvaluereference'{pos=element(2,'$3'),module=element(3,'$3'),
- value=element(3,'$5')}}.
-Tag -> '[' Class number ']': #tag{class='$2',number=element(3,'$3')}.
-Tag -> '[' Class identifier ']': #tag{class='$2',number=element(3,'$3')}.
-
-ClassNumber -> number :element(3,'$1').
-% inlined above ClassNumber -> typereference '.' identifier :{'Externalvaluereference',element(3,'$1'),element(3,'$3')}.
-ClassNumber -> identifier :element(3,'$1').
-
-Class -> 'UNIVERSAL' :element(1,'$1').
-Class -> 'APPLICATION' :element(1,'$1').
-Class -> 'PRIVATE' :element(1,'$1').
-Class -> '$empty' :'CONTEXT'.
-
-% conflict redundant TaggedValue -> Value:'$1'.
-
-% inlined EmbeddedPDVType -> 'EMBEDDED' 'PDV' :'EMBEDDED PDV'.
-
-% inlined EmbeddedPDVValue -> SequenceValue:'$1'.
-
-% inlined ExternalType -> 'EXTERNAL' :'EXTERNAL'.
-
-% inlined ExternalValue -> SequenceValue :'$1'.
-
-% inlined ObjectIdentifierType -> 'OBJECT' 'IDENTIFIER' :'OBJECT IDENTIFIER'.
-
-ObjectIdentifierValue -> '{' ObjIdComponentList '}' :'$2'.
-% inlined ObjectIdentifierValue -> SequenceAndSequenceOfValue :'$1'.
-% ObjectIdentifierValue -> '{' identifier ObjIdComponentList '}' :{'ObjectIdentifierValue','$2','$3'}.
-% ObjectIdentifierValue -> '{' typereference '.' identifier ObjIdComponentList '}' :{'ObjectIdentifierValue',{'$2','$4'},'$5'}.
-
-ObjIdComponentList -> Value:'$1'.
-ObjIdComponentList -> Value ObjIdComponentList :['$1'|'$2'].
-%ObjIdComponentList -> DefinedValue:'$1'.
-%ObjIdComponentList -> number:'$1'.
-%ObjIdComponentList -> DefinedValue ObjIdComponentList :['$1'|'$2'].
-%ObjIdComponentList -> number ObjIdComponentList :['$1'|'$2'].
-%ObjIdComponentList -> ObjIdComponent ObjIdComponentList :['$1'|'$2'].
-%ObjIdComponentList -> ObjIdComponent ObjIdComponentList :['$1'|'$2'].
-
-% redundant ObjIdComponent -> NameForm :'$1'. % expanded
-% replaced by 2 ObjIdComponent -> NumberForm :'$1'.
-% ObjIdComponent -> number :'$1'.
-% ObjIdComponent -> DefinedValue :'$1'. % means DefinedValue
-% ObjIdComponent -> NameAndNumberForm :'$1'.
-% ObjIdComponent -> NamedNumber :'$1'.
-% NamedBit replaced by NamedNumber to reduce grammar
-% must check later that "number" is positive
-
-% NameForm -> identifier:'$1'.
-
-% inlined NumberForm -> number :'$1'.
-% inlined NumberForm -> DefinedValue :'$1'.
-
-% replaced by NamedBit NameAndNumberForm -> identifier '(' NumberForm ')'.
-% NameAndNumberForm -> NamedBit:'$1'.
-
-
-CharacterStringType -> restrictedcharacterstringtype :element(3,'$1').
-CharacterStringType -> 'CHARACTER' 'STRING' :'CHARACTER STRING'.
-
-RestrictedCharacterStringValue -> cstring :element(3, '$1').
-% modified below RestrictedCharacterStringValue -> CharacterStringList :'$1'.
-% conflict vs BuiltinValue RestrictedCharacterStringValue -> SequenceAndSequenceOfValue :'$1'.
-RestrictedCharacterStringValue -> Quadruple :'$1'.
-RestrictedCharacterStringValue -> Tuple :'$1'.
-
-% redundant CharacterStringList -> '{' ValueList '}' :'$2'. % modified
-
-% redundant CharSyms -> CharsDefn :'$1'.
-% redundant CharSyms -> CharSyms ',' CharsDefn :['$1'|'$3'].
-
-% redundant CharsDefn -> cstring :'$1'.
-% temporary replaced see below CharsDefn -> DefinedValue :'$1'.
-% redundant CharsDefn -> Value :'$1'.
-
-Quadruple -> '{' number ',' number ',' number ',' number '}' :{'Quadruple','$2','$4','$6','$8'}.
-% {Group,Plane,Row,Cell}
-
-Tuple -> '{' number ',' number '}' :{'Tuple', '$2','$4'}.
-% {TableColumn,TableRow}
-
-% inlined UnrestrictedCharacterString -> 'CHARACTER' 'STRING' :'CHARACTER STRING'.
-
-CharacterStringValue -> RestrictedCharacterStringValue :'$1'.
-% conflict vs BuiltinValue CharacterStringValue -> SequenceValue :'$1'. % UnrestrictedCharacterStringValue
-
-% inlined UsefulType -> typereference :'$1'.
-
-SelectionType -> identifier '<' Type : {'SelectionType',element(3,'$1'),'$3'}.
-
-ConstrainedType -> Type Constraint :
- '$1'#type{constraint=merge_constraints(['$2'])}.
-ConstrainedType -> Type Constraint Constraint :
- '$1'#type{constraint=merge_constraints(['$2','$3'])}.
-ConstrainedType -> Type Constraint Constraint Constraint:
- '$1'#type{constraint=merge_constraints(['$2','$3','$4'])}.
-ConstrainedType -> Type Constraint Constraint Constraint Constraint:
- '$1'#type{constraint=merge_constraints(['$2','$3','$4','$5'])}.
-%ConstrainedType -> Type Constraint :'$1'#type{constraint='$2'}.
-%ConstrainedType -> Type Constraint :'$1'#type{constraint='$2'}.
-ConstrainedType -> TypeWithConstraint :'$1'.
-
-TypeWithConstraint -> 'SET' Constraint 'OF' Type :
- #type{def = {'SET OF','$4'},constraint=merge_constraints(['$2'])}.
-TypeWithConstraint -> 'SET' 'SIZE' Constraint 'OF' Type :
- #type{def = {'SET OF','$5'},constraint = merge_constraints([#constraint{c={'SizeConstraint','$3'#constraint.c}}])}.
-TypeWithConstraint -> 'SEQUENCE' Constraint 'OF' Type :
- #type{def = {'SEQUENCE OF','$4'},constraint =
- merge_constraints(['$2'])}.
-TypeWithConstraint -> 'SEQUENCE' 'SIZE' Constraint 'OF' Type :
- #type{def = {'SEQUENCE OF','$5'},constraint = merge_constraints([#constraint{c={'SizeConstraint','$3'#constraint.c}}])}.
-
-
-Constraint -> '(' ConstraintSpec ExceptionSpec ')' :
- #constraint{c='$2',e='$3'}.
-
-% inlined Constraint -> SubTypeConstraint :'$1'.
-ConstraintSpec -> ElementSetSpecs :'$1'.
-ConstraintSpec -> UserDefinedConstraint :'$1'.
-ConstraintSpec -> TableConstraint :'$1'.
-
-TableConstraint -> ComponentRelationConstraint : '$1'.
-TableConstraint -> ObjectSet : '$1'.
-%TableConstraint -> '{' typereference '}' :tableconstraint.
-
-ComponentRelationConstraint -> '{' typereference '}' '{' '@' ComponentIdList '}' : componentrelation.
-ComponentRelationConstraint -> '{' typereference '}' '{' '@' '.' ComponentIdList '}' : componentrelation.
-
-ComponentIdList -> identifier: ['$1'].
-ComponentIdList -> identifier '.' ComponentIdList: ['$1'| '$3'].
-
-
-% later ConstraintSpec -> GeneralConstraint :'$1'.
-
-% from X.682
-UserDefinedConstraint -> 'CONSTRAINED' 'BY' '{' '}' : {constrained_by,[]}.
-UserDefinedConstraint -> 'CONSTRAINED' 'BY'
- '{' UserDefinedConstraintParameters '}' : {constrained_by,'$4'}.
-
-UserDefinedConstraintParameters -> UserDefinedConstraintParameter : ['$1'].
-UserDefinedConstraintParameters ->
- UserDefinedConstraintParameter ','
- UserDefinedConstraintParameters: ['$1'|'$3'].
-
-UserDefinedConstraintParameter -> Type '.' ActualParameter : {'$1','$3'}.
-UserDefinedConstraintParameter -> ActualParameter : '$1'.
-
-
-
-ExceptionSpec -> '!' ExceptionIdentification : '$1'.
-ExceptionSpec -> '$empty' : undefined.
-
-ExceptionIdentification -> SignedNumber : '$1'.
-% inlined ExceptionIdentification -> DefinedValue : '$1'.
-ExceptionIdentification -> typereference '.' identifier :
- #'Externalvaluereference'{pos=element(2,'$1'),module=element(3,'$1'),
- value=element(3,'$1')}.
-ExceptionIdentification -> identifier :'$1'.
-ExceptionIdentification -> Type ':' Value : {'$1','$3'}.
-
-% inlined SubTypeConstraint -> ElementSetSpec
-
-ElementSetSpecs -> ElementSetSpec : '$1'.
-ElementSetSpecs -> ElementSetSpec ',' '...': {'$1',[]}.
-ElementSetSpecs -> '...' ',' ElementSetSpec : {[],'$3'}.
-ElementSetSpecs -> ElementSetSpec ',' '...' ',' ElementSetSpec : {'$1','$5'}.
-
-ElementSetSpec -> Unions : '$1'.
-ElementSetSpec -> 'ALL' Exclusions : {'ALL','$2'}.
-
-Unions -> Intersections : '$1'.
-Unions -> UElems UnionMark IntersectionElements :
- case {'$1','$3'} of
- {{'SingleValue',V1},{'SingleValue',V2}} ->
- {'SingleValue',ordsets:union(to_set(V1),to_set(V2))}
- end.
-
-UElems -> Unions :'$1'.
-
-Intersections -> IntersectionElements :'$1'.
-Intersections -> IElems IntersectionMark IntersectionElements :
- case {'$1','$3'} of
- {{'SingleValue',V1},{'SingleValue',V2}} ->
- {'SingleValue',ordsets:intersection(to_set(V1),to_set(V2))};
- {V1,V2} when list(V1) ->
- V1 ++ [V2];
- {V1,V2} ->
- [V1,V2]
- end.
-%Intersections -> IElems '^' IntersectionElements :{'INTERSECTION','$1','$3'}.
-%Intersections -> IElems 'INTERSECTION' IntersectionElements :{'INTERSECTION','$1','$3'}.
-
-IElems -> Intersections :'$1'.
-
-IntersectionElements -> Elements :'$1'.
-IntersectionElements -> Elems Exclusions :{'$1','$2'}.
-
-Elems -> Elements :'$1'.
-
-Exclusions -> 'EXCEPT' Elements :{'EXCEPT','$2'}.
-
-IntersectionMark -> 'INTERSECTION':'$1'.
-IntersectionMark -> '^':'$1'.
-UnionMark -> 'UNION':'$1'.
-UnionMark -> '|':'$1'.
-
-
-Elements -> SubTypeElements : '$1'.
-%Elements -> ObjectSetElements : '$1'.
-Elements -> '(' ElementSetSpec ')' : '$2'.
-Elements -> ReferencedType : '$1'.
-
-SubTypeElements -> ValueList : {'SingleValue','$1'}. % NOTE it must be a Value
-% The rule above was modified only because of conflicts
-SubTypeElements -> 'INCLUDES' Type : {'ContainedSubType','$2'}.
-%not lalr1 if this is activated SubTypeElements -> Type : {'TypeConstraint','$1'}.
-SubTypeElements -> LowerEndpoint '..' UpperEndpoint : {'ValueRange',{'$1','$3'}}.
-SubTypeElements -> 'FROM' Constraint : {'PermittedAlphabet','$2'#constraint.c}.
-SubTypeElements -> 'SIZE' Constraint: {'SizeConstraint','$2'#constraint.c}.
-% later will introduce conflicts related to NULL SubTypeElements -> Type : {'TypeConstraint','$1'}.
-SubTypeElements -> 'WITH' 'COMPONENT' Constraint:{'WITH COMPONENT','$3'}.
-SubTypeElements -> 'WITH' 'COMPONENTS' '{' TypeConstraints '}':{'WITH COMPONENTS',{'FullSpecification','$4'}}.
-SubTypeElements -> 'WITH' 'COMPONENTS' '{' '...' ',' TypeConstraints '}' :{'WITH COMPONENTS',{'PartialSpecification','$3'}}.
-
-% inlined above InnerTypeConstraints ::=
-% inlined above SingleTypeConstraint::= Constraint
-% inlined above MultipleTypeConstraints ::= FullSpecification | PartialSpecification
-% inlined above FullSpecification ::= "{" TypeConstraints "}"
-% inlined above PartialSpecification ::= "{" "..." "," TypeConstraints "}"
-% TypeConstraints -> identifier : [{'NamedConstraint',element(3,'$1'),undefined,undefined}]. % is this really meaningful or allowed
-TypeConstraints -> NamedConstraint : ['$1'].
-TypeConstraints -> NamedConstraint ',' TypeConstraints : ['$1'|'$3'].
-TypeConstraints -> identifier : ['$1'].
-TypeConstraints -> identifier ',' TypeConstraints : ['$1'|'$3'].
-
-NamedConstraint -> identifier Constraint PresenceConstraint :{'NamedConstraint',element(3,'$1'),'$2','$3'}.
-NamedConstraint -> identifier Constraint :{'NamedConstraint',element(3,'$1'),'$2',undefined}.
-NamedConstraint -> identifier PresenceConstraint :{'NamedConstraint',element(3,'$1'),undefined,'$2'}.
-
-PresenceConstraint -> 'PRESENT' : 'PRESENT'.
-PresenceConstraint -> 'ABSENT' : 'ABSENT'.
-PresenceConstraint -> 'OPTIONAL' : 'OPTIONAL'.
-
-
-
-LowerEndpoint -> LowerEndValue :'$1'.
-%LowerEndpoint -> LowerEndValue '<':{gt,'$1'}.
-LowerEndpoint -> LowerEndValue '<':('$1'+1).
-
-UpperEndpoint -> UpperEndValue :'$1'.
-%UpperEndpoint -> '<' UpperEndValue :{lt,'$2'}.
-UpperEndpoint -> '<' UpperEndValue :('$2'-1).
-
-LowerEndValue -> Value :'$1'.
-LowerEndValue -> 'MIN' :'MIN'.
-
-UpperEndValue -> Value :'$1'.
-UpperEndValue -> 'MAX' :'MAX'.
-
-
-% X.681
-
-
-% X.681 chap 15
-
-%TypeFromObject -> ReferencedObjects '.' FieldName : {'$1','$3'}.
-TypeFromObject -> typereference '.' FieldName : {'$1','$3'}.
-
-ReferencedObjects -> typereference : '$1'.
-%ReferencedObjects -> ParameterizedObject
-%ReferencedObjects -> DefinedObjectSet
-%ReferencedObjects -> ParameterizedObjectSet
-
-FieldName -> typefieldreference : ['$1'].
-FieldName -> valuefieldreference : ['$1'].
-FieldName -> FieldName '.' FieldName : ['$1' | '$3'].
-
-PrimitiveFieldName -> typefieldreference : '$1'.
-PrimitiveFieldName -> valuefieldreference : '$1'.
-
-%ObjectSetAssignment -> typereference DefinedObjectClass '::=' ObjectSet: null.
-ObjectSetAssignment -> typereference typereference '::=' ObjectSet :
- #typedef{pos=element(2,'$1'),name=element(3,'$1'),typespec={'ObjectSet',element(3,'$2'), '$4'}}.
-ObjectSetAssignment -> typereference typereference '.' typereference '::=' ObjectSet.
-
-ObjectSet -> '{' ElementSetSpecs '}' : '$2'.
-ObjectSet -> '{' '...' '}' : ['EXTENSIONMARK'].
-
-%ObjectSetElements -> Object.
-% ObjectSetElements -> identifier : '$1'.
-%ObjectSetElements -> DefinedObjectSet.
-%ObjectSetElements -> ObjectSetFromObjects.
-%ObjectSetElements -> ParameterizedObjectSet.
-
-%ObjectAssignment -> identifier DefinedObjectClass '::=' Object.
-ObjectAssignment -> ValueAssignment.
-%ObjectAssignment -> identifier typereference '::=' Object.
-%ObjectAssignment -> identifier typereference '.' typereference '::=' Object.
-
-%Object -> DefinedObject: '$1'.
-%Object -> ExternalObjectReference: '$1'.%Object -> DefinedObject: '$1'.
-Object -> typereference '.' identifier: '$1'.%Object -> DefinedObject: '$1'.
-Object -> identifier: '$1'.%Object -> DefinedObject: '$1'.
-
-%Object -> ObjectDefn -> DefaultSyntax: '$1'.
-Object -> '{' FieldSetting ',' FieldSettings '}' : ['$2'|'$4'].
-Object -> '{' FieldSetting '}' :['$2'].
-
-%% For User-friendly notation
-%% Object -> ObjectDefn -> DefinedSyntax
-Object -> '{' '}'.
-Object -> '{' DefinedSyntaxTokens '}'.
-
-% later Object -> ParameterizedObject: '$1'. look in x.683
-
-%DefinedObject -> ExternalObjectReference: '$1'.
-%DefinedObject -> identifier: '$1'.
-
-DefinedObjectClass -> typereference.
-%DefinedObjectClass -> objectclassreference.
-DefinedObjectClass -> ExternalObjectClassReference.
-%DefinedObjectClass -> typereference '.' objectclassreference.
-%%DefinedObjectClass -> UsefulObjectClassReference.
-
-ExternalObjectReference -> typereference '.' identifier.
-ExternalObjectClassReference -> typereference '.' typereference.
-%%ExternalObjectClassReference -> typereference '.' objectclassreference.
-
-ObjectDefn -> DefaultSyntax: '$1'.
-%ObjectDefn -> DefinedSyntax: '$1'.
-
-ObjectFromObject -> ReferencedObjects '.' FieldName : {'ObjectFromObject','$1','$3'}.
-
-% later look in x.683 ParameterizedObject ->
-
-%DefaultSyntax -> '{' '}'.
-%DefaultSyntax -> '{' FieldSettings '}': '$2'.
-DefaultSyntax -> '{' FieldSetting ',' FieldSettings '}': '$2'.
-DefaultSyntax -> '{' FieldSetting '}': '$2'.
-
-FieldSetting -> PrimitiveFieldName Setting: {'$1','$2'}.
-
-FieldSettings -> FieldSetting ',' FieldSettings: ['$1'|'$3'].
-FieldSettings -> FieldSetting ',' FieldSettings: ['$1'|'$3'].
-FieldSettings -> FieldSetting: '$1'.
-
-%DefinedSyntax -> '{' '}'.
-DefinedSyntax -> '{' DefinedSyntaxTokens '}': '$2'.
-
-DefinedSyntaxTokens -> DefinedSyntaxToken: '$1'.
-DefinedSyntaxTokens -> DefinedSyntaxToken DefinedSyntaxTokens: ['$1'|'$2'].
-
-% expanded DefinedSyntaxToken -> Literal: '$1'.
-%DefinedSyntaxToken -> typereference: '$1'.
-DefinedSyntaxToken -> word: '$1'.
-DefinedSyntaxToken -> ',': '$1'.
-DefinedSyntaxToken -> Setting: '$1'.
-%DefinedSyntaxToken -> '$empty': nil .
-
-% Setting ::= Type|Value|ValueSet|Object|ObjectSet
-Setting -> Type: '$1'.
-%Setting -> Value: '$1'.
-%Setting -> ValueNotNull: '$1'.
-Setting -> BuiltinValue: '$1'.
-Setting -> ValueSet: '$1'.
-%Setting -> Object: '$1'.
-%Setting -> ExternalObjectReference.
-Setting -> typereference '.' identifier.
-Setting -> identifier.
-Setting -> ObjectDefn.
-
-Setting -> ObjectSet: '$1'.
-
-
-Erlang code.
-%%-author('[email protected]').
--copyright('Copyright (c) 1991-99 Ericsson Telecom AB').
--vsn('$Revision: /main/release/1 $').
--include("asn1_records.hrl").
-
-to_set(V) when list(V) ->
- ordsets:list_to_set(V);
-to_set(V) ->
- ordsets:list_to_set([V]).
-
-merge_constraints({Rlist,ExtList}) -> % extensionmarker in constraint
- {merge_constraints(Rlist,[],[]),
- merge_constraints(ExtList,[],[])};
-
-merge_constraints(Clist) ->
- merge_constraints(Clist, [], []).
-
-merge_constraints([Ch|Ct],Cacc, Eacc) ->
- NewEacc = case Ch#constraint.e of
- undefined -> Eacc;
- E -> [E|Eacc]
- end,
- merge_constraints(Ct,[fixup_constraint(Ch#constraint.c)|Cacc],NewEacc);
-
-merge_constraints([],Cacc,[]) ->
- lists:flatten(Cacc);
-merge_constraints([],Cacc,Eacc) ->
- lists:flatten(Cacc) ++ [{'Errors',Eacc}].
-
-fixup_constraint(C) ->
- case C of
- {'SingleValue',V} when list(V) ->
- [C,
- {'ValueRange',{lists:min(V),lists:max(V)}}];
- {'PermittedAlphabet',{'SingleValue',V}} when list(V) ->
- V2 = {'SingleValue',
- ordsets:list_to_set(lists:flatten(V))},
- {'PermittedAlphabet',V2};
- {'PermittedAlphabet',{'SingleValue',V}} ->
- V2 = {'SingleValue',[V]},
- {'PermittedAlphabet',V2};
- {'SizeConstraint',Sc} ->
- {'SizeConstraint',fixup_size_constraint(Sc)};
-
- List when list(List) ->
- [fixup_constraint(Xc)||Xc <- List];
- Other ->
- Other
- end.
-
-fixup_size_constraint({'ValueRange',{Lb,Ub}}) ->
- {Lb,Ub};
-fixup_size_constraint({{'ValueRange',R},[]}) ->
- {R,[]};
-fixup_size_constraint({[],{'ValueRange',R}}) ->
- {[],R};
-fixup_size_constraint({{'ValueRange',R1},{'ValueRange',R2}}) ->
- {R1,R2};
-fixup_size_constraint({'SingleValue',[Sv]}) ->
- fixup_size_constraint({'SingleValue',Sv});
-fixup_size_constraint({'SingleValue',L}) when list(L) ->
- ordsets:list_to_set(L);
-fixup_size_constraint({'SingleValue',L}) ->
- {L,L};
-fixup_size_constraint({C1,C2}) ->
- {fixup_size_constraint(C1), fixup_size_constraint(C2)}.
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/lib/asn1/src/asn1ct_parser2.erl b/lib/asn1/src/asn1ct_parser2.erl
index 3891fce8d3..488e4af4e0 100644
--- a/lib/asn1/src/asn1ct_parser2.erl
+++ b/lib/asn1/src/asn1ct_parser2.erl
@@ -20,7 +20,7 @@
%%
-module(asn1ct_parser2).
--export([parse/1]).
+-export([parse/2,format_error/1]).
-include("asn1_records.hrl").
%% Only used internally within this module.
@@ -28,26 +28,34 @@
-record(constraint, {c,e}).
-record(identifier, {pos,val}).
-%% parse all types in module
-parse(Tokens) ->
- case catch parse_ModuleDefinition(Tokens) of
- {'EXIT',Reason} ->
- {error,{{undefined,get(asn1_module),
- [internal,error,'when',parsing,module,definition,Reason]},
- hd(Tokens)}};
- {asn1_error,Reason} ->
- {error,{Reason,hd(Tokens)}};
- {ModuleDefinition,Rest1} ->
- {Types,Rest2} = parse_AssignmentList(Rest1),
- clean_process_dictionary(),
- case Rest2 of
- [{'END',_}|_Rest3] ->
- {ok,ModuleDefinition#module{typeorval = Types}};
- _ ->
- {error,{{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'END']},
- hd(Rest2)}}
- end
+parse(File0, Tokens0) ->
+ try do_parse(Tokens0) of
+ {ok,#module{}}=Result ->
+ Result
+ catch
+ throw:{asn1_error,Fun} when is_function(Fun, 0) ->
+ handle_parse_error(File0, Fun());
+ throw:{asn1_error,{parse_error,Tokens}} ->
+ handle_parse_error(File0, Tokens)
+ after
+ clean_process_dictionary()
+ end.
+
+handle_parse_error(File0, [Token|_]) ->
+ File = filename:basename(File0),
+ Line = get_line(Token),
+ Error = {structured_error,{File,Line},?MODULE,
+ {syntax_error,get_token(Token)}},
+ {error,[Error]}.
+
+do_parse(Tokens0) ->
+ {ModuleDefinition,Tokens1} = parse_ModuleDefinition(Tokens0),
+ {Types,Tokens2} = parse_AssignmentList(Tokens1),
+ case Tokens2 of
+ [{'END',_}|_Rest3] ->
+ {ok,ModuleDefinition#module{typeorval=Types}};
+ _ ->
+ parse_error(Tokens2)
end.
clean_process_dictionary() ->
@@ -57,6 +65,11 @@ clean_process_dictionary() ->
_ = erase(extensiondefault),
ok.
+format_error({syntax_error,Token}) when is_atom(Token) ->
+ io_lib:format("syntax error before: '~s'", [Token]);
+format_error({syntax_error,Token}) ->
+ io_lib:format("syntax error before: '~p'", [Token]).
+
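A minimal sketch of how a caller might use the new interface; the file name and token list below are hypothetical, and in the compiler the tokens would come from the ASN.1 scanner:

    %% Hypothetical caller of parse/2 and format_error/1 (illustration only).
    case asn1ct_parser2:parse("MyModule.asn1", Tokens) of
        {ok,M} ->
            %% M is a #module{} record with typeorval filled in.
            M;
        {error,[{structured_error,{File,Line},Mod,Desc}|_]} ->
            io:format("~s:~w: ~s~n", [File,Line,Mod:format_error(Desc)])
    end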
parse_ModuleDefinition([{typereference,L1,ModuleIdentifier}|Rest0]) ->
put(asn1_module,ModuleIdentifier),
{_DefinitiveIdentifier,Rest02} =
@@ -70,9 +83,7 @@ parse_ModuleDefinition([{typereference,L1,ModuleIdentifier}|Rest0]) ->
[{'DEFINITIONS',_}|Rest03] ->
Rest03;
_ ->
- throw({asn1_error,{get_line(hd(Rest02)),get(asn1_module),
- [got,get_token(hd(Rest02)),
- expected,'DEFINITIONS']}})
+ parse_error(Rest02)
end,
{TagDefault,Rest2} =
case Rest of
@@ -104,12 +115,11 @@ parse_ModuleDefinition([{typereference,L1,ModuleIdentifier}|Rest0]) ->
extensiondefault = ExtensionDefault,
exports = Exports,
imports = {imports, Imports}}, Rest6};
- _ -> throw({asn1_error,{get_line(hd(Rest3)),get(asn1_module),
- [got,get_token(hd(Rest3)),expected,"::= BEGIN"]}})
+ _ ->
+ parse_error(Rest3)
end;
parse_ModuleDefinition(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,typereference]}}).
+ parse_error(Tokens).
parse_Exports([{'EXPORTS',_L1},{';',_L2}|Rest]) ->
{{exports,[]},Rest};
@@ -122,8 +132,7 @@ parse_Exports([{'EXPORTS',_L1}|Rest]) ->
[{';',_}|Rest3] ->
{{exports,SymbolList},Rest3};
_ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,';']}})
+ parse_error(Rest2)
end;
parse_Exports(Rest) ->
{{exports,all},Rest}.
@@ -137,29 +146,25 @@ parse_SymbolList(Tokens,Acc) ->
[{',',_L1}|Rest2] ->
parse_SymbolList(Rest2,[Symbol|Acc]);
Rest2 ->
- {lists:reverse([Symbol|Acc]),Rest2}
+ {lists:reverse(Acc, [Symbol]),Rest2}
end.
parse_Symbol(Tokens) ->
parse_Reference(Tokens).
parse_Reference([{typereference,L1,TrefName},{'{',_L2},{'}',_L3}|Rest]) ->
-% {Tref,Rest};
{tref2Exttref(L1,TrefName),Rest};
parse_Reference([Tref1 = {typereference,_,_},{'.',_},Tref2 = {typereference,_,_},
{'{',_L2},{'}',_L3}|Rest]) ->
-% {{Tref1,Tref2},Rest};
{{tref2Exttref(Tref1),tref2Exttref(Tref2)},Rest};
parse_Reference([Tref = {typereference,_L1,_TrefName}|Rest]) ->
{tref2Exttref(Tref),Rest};
-parse_Reference([Vref = {identifier,_L1,_VName},{'{',_L2},{'}',_L3}|Rest]) ->
+parse_Reference([#identifier{}=Vref,{'{',_L2},{'}',_L3}|Rest]) ->
{identifier2Extvalueref(Vref),Rest};
-parse_Reference([Vref = {identifier,_L1,_VName}|Rest]) ->
+parse_Reference([#identifier{}=Vref|Rest]) ->
{identifier2Extvalueref(Vref),Rest};
parse_Reference(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- [typereference,identifier]]}}).
+ parse_error(Tokens).
parse_Imports([{'IMPORTS',_L1},{';',_L2}|Rest]) ->
{{imports,[]},Rest};
@@ -168,9 +173,8 @@ parse_Imports([{'IMPORTS',_L1}|Rest]) ->
case Rest2 of
[{';',_L2}|Rest3] ->
{{imports,SymbolsFromModuleList},Rest3};
- Rest3 ->
- throw({asn1_error,{get_line(hd(Rest3)),get(asn1_module),
- [got,get_token(hd(Rest3)),expected,';']}})
+ _ ->
+ parse_error(Rest2)
end;
parse_Imports(Tokens) ->
{{imports,[]},Tokens}.
@@ -180,11 +184,12 @@ parse_SymbolsFromModuleList(Tokens) ->
parse_SymbolsFromModuleList(Tokens,Acc) ->
{SymbolsFromModule,Rest} = parse_SymbolsFromModule(Tokens),
- case (catch parse_SymbolsFromModule(Rest)) of
+ try parse_SymbolsFromModule(Rest) of
{Sl,_Rest2} when is_record(Sl,'SymbolsFromModule') ->
- parse_SymbolsFromModuleList(Rest,[SymbolsFromModule|Acc]);
- _ ->
- {lists:reverse([SymbolsFromModule|Acc]),Rest}
+ parse_SymbolsFromModuleList(Rest, [SymbolsFromModule|Acc])
+ catch
+ throw:{asn1_error,_} ->
+ {lists:reverse(Acc, [SymbolsFromModule]),Rest}
end.
parse_SymbolsFromModule(Tokens) ->
@@ -198,169 +203,154 @@ parse_SymbolsFromModule(Tokens) ->
end,
{SymbolList,Rest} = parse_SymbolList(Tokens),
case Rest of
- [{'FROM',_L1},Tref = {typereference,_,Name},Ref={identifier,_L2,_Id},C={',',_}|Rest2] ->
- NewSymbolList = lists:map(SetRefModuleName(Name),SymbolList),
+ [{'FROM',_L1},{typereference,_,Name}=Tref|
+ [#identifier{},{',',_}|_]=Rest2] ->
+ NewSymbolList = lists:map(SetRefModuleName(Name), SymbolList),
{#'SymbolsFromModule'{symbols=NewSymbolList,
- module=tref2Exttref(Tref)},[Ref,C|Rest2]};
+ module=tref2Exttref(Tref)},Rest2};
%% This is a special case when there is only one Symbol imported
%% from the next module. No other way to distinguish Ref from
%% a part of the GlobalModuleReference of Name.
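%% Hypothetical example (illustration only):
%%     IMPORTS T1 FROM ModA  v FROM ModB;
%% here `v' could either be ModA's AssignedIdentifier or the only symbol
%% imported from ModB; the clause below assumes the latter.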
- [{'FROM',_L1},Tref = {typereference,_,Name},Ref = {identifier,_L2,_Id},From = {'FROM',_}|Rest2] ->
- NewSymbolList = lists:map(SetRefModuleName(Name),SymbolList),
+ [{'FROM',_L1},{typereference,_,Name}=Tref|
+ [#identifier{},{'FROM',_}|_]=Rest2] ->
+ NewSymbolList = lists:map(SetRefModuleName(Name), SymbolList),
{#'SymbolsFromModule'{symbols=NewSymbolList,
- module=tref2Exttref(Tref)},[Ref,From|Rest2]};
- [{'FROM',_L1},Tref = {typereference,_,Name},{identifier,_L2,_Id}|Rest2] ->
- NewSymbolList = lists:map(SetRefModuleName(Name),SymbolList),
+ module=tref2Exttref(Tref)},Rest2};
+ [{'FROM',_L1},{typereference,_,Name}=Tref,#identifier{}|Rest2] ->
+ NewSymbolList = lists:map(SetRefModuleName(Name), SymbolList),
{#'SymbolsFromModule'{symbols=NewSymbolList,
module=tref2Exttref(Tref)},Rest2};
- [{'FROM',_L1},Tref = {typereference,_,Name},Brace = {'{',_}|Rest2] ->
- {_ObjIdVal,Rest3} = parse_ObjectIdentifierValue([Brace|Rest2]), % value not used yet, fix me
- NewSymbolList = lists:map(SetRefModuleName(Name),SymbolList),
+ [{'FROM',_L1},{typereference,_,Name}=Tref|[{'{',_}|_]=Rest2] ->
+ {_ObjIdVal,Rest3} = parse_ObjectIdentifierValue(Rest2), % value not used yet, fix me
+ NewSymbolList = lists:map(SetRefModuleName(Name), SymbolList),
{#'SymbolsFromModule'{symbols=NewSymbolList,
module=tref2Exttref(Tref)},Rest3};
- [{'FROM',_L1},Tref = {typereference,_,Name}|Rest2] ->
- NewSymbolList = lists:map(SetRefModuleName(Name),SymbolList),
+ [{'FROM',_L1},{typereference,_,Name}=Tref|Rest2] ->
+ NewSymbolList = lists:map(SetRefModuleName(Name), SymbolList),
{#'SymbolsFromModule'{symbols=NewSymbolList,
module=tref2Exttref(Tref)},Rest2};
_ ->
- throw({asn1_error,{get_line(hd(Rest)),get(asn1_module),
- [got,get_token(hd(Rest)),expected,
- ['FROM typerefernece identifier ,',
- 'FROM typereference identifier',
- 'FROM typereference {',
- 'FROM typereference']]}})
+ parse_error(Rest)
end.
parse_ObjectIdentifierValue([{'{',_}|Rest]) ->
parse_ObjectIdentifierValue(Rest,[]).
-parse_ObjectIdentifierValue([{number,_,Num}|Rest],Acc) ->
+parse_ObjectIdentifierValue([{number,_,Num}|Rest], Acc) ->
parse_ObjectIdentifierValue(Rest,[Num|Acc]);
-parse_ObjectIdentifierValue([{identifier,_,Id},{'(',_}, {number,_,Num}, {')',_}|Rest],Acc) ->
+parse_ObjectIdentifierValue([#identifier{val=Id},{'(',_},{number,_,Num},{')',_}|Rest], Acc) ->
parse_ObjectIdentifierValue(Rest,[{'NamedNumber',Id,Num}|Acc]);
-parse_ObjectIdentifierValue([{identifier,_,Id},{'(',_}, {identifier,_,Id2}, {')',_}|Rest],Acc) ->
+parse_ObjectIdentifierValue([#identifier{val=Id},{'(',_},#identifier{val=Id2},{')',_}|Rest], Acc) ->
parse_ObjectIdentifierValue(Rest,[{'NamedNumber',Id,Id2}|Acc]);
-parse_ObjectIdentifierValue([{identifier,_,Id},{'(',_}, {typereference,_,Tref},{'.',_},{identifier,_,Id2}, {')',_}|Rest],Acc) ->
- parse_ObjectIdentifierValue(Rest,[{'NamedNumber',Id,{'ExternalValue',Tref,Id2}}|Acc]);
-parse_ObjectIdentifierValue([Id = {identifier,_,_}|Rest],Acc) ->
- parse_ObjectIdentifierValue(Rest,[identifier2Extvalueref(Id)|Acc]);
-parse_ObjectIdentifierValue([{'}',_}|Rest],Acc) ->
+parse_ObjectIdentifierValue([#identifier{val=Id},{'(',_},{typereference,_,Tref},{'.',_},#identifier{val=Id2}, {')',_}|Rest], Acc) ->
+ parse_ObjectIdentifierValue(Rest, [{'NamedNumber',Id,{'ExternalValue',Tref,Id2}}|Acc]);
+parse_ObjectIdentifierValue([#identifier{}=Id|Rest], Acc) ->
+ parse_ObjectIdentifierValue(Rest, [identifier2Extvalueref(Id)|Acc]);
+parse_ObjectIdentifierValue([{'}',_}|Rest], Acc) ->
{lists:reverse(Acc),Rest};
-parse_ObjectIdentifierValue([H|_T],_Acc) ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,
- ['{ some of the following }',number,'identifier ( number )',
- 'identifier ( identifier )',
- 'identifier ( typereference.identifier)',identifier]]}}).
+parse_ObjectIdentifierValue(Tokens, _Acc) ->
+ parse_error(Tokens).
-parse_AssignmentList(Tokens = [{'END',_}|_Rest]) ->
- {[],Tokens};
-parse_AssignmentList(Tokens = [{'$end',_}|_Rest]) ->
- {[],Tokens};
parse_AssignmentList(Tokens) ->
- parse_AssignmentList(Tokens,[]).
+ parse_AssignmentList(Tokens, []).
-parse_AssignmentList(Tokens= [{'END',_}|_Rest],Acc) ->
+parse_AssignmentList([{'END',_}|_]=Tokens, Acc) ->
{lists:reverse(Acc),Tokens};
-parse_AssignmentList(Tokens= [{'$end',_}|_Rest],Acc) ->
+parse_AssignmentList([{'$end',_}|_]=Tokens, Acc) ->
{lists:reverse(Acc),Tokens};
-parse_AssignmentList(Tokens,Acc) ->
- case (catch parse_Assignment(Tokens)) of
- {'EXIT',Reason} ->
- exit(Reason);
- {asn1_error,R} ->
-% [H|T] = Tokens,
- throw({error,{R,hd(Tokens)}});
- {Assignment,Rest} ->
- parse_AssignmentList(Rest,[Assignment|Acc])
- end.
-
-parse_Assignment(Tokens) ->
- Flist = [fun parse_TypeAssignment/1,
- fun parse_ValueAssignment/1,
- fun parse_ObjectClassAssignment/1,
- fun parse_ObjectAssignment/1,
- fun parse_ObjectSetAssignment/1,
- fun parse_ParameterizedAssignment/1,
+parse_AssignmentList(Tokens0, Acc) ->
+ {Assignment,Tokens} = parse_Assignment(Tokens0),
+ parse_AssignmentList(Tokens, [Assignment|Acc]).
+
+parse_Assignment([{typereference,L1,Name},{'::=',_}|Tokens0]) ->
+ %% 1) Type ::= TypeDefinition
+ %% 2) CLASS-NAME ::= CLASS {...}
+ Flist = [{type,fun parse_Type/1},
+ {class,fun parse_ObjectClass/1}],
+ case parse_or_tag(Tokens0, Flist) of
+ {{type,Type},Tokens} ->
+ %% TypeAssignment
+ {#typedef{pos=L1,name=Name,typespec=Type},Tokens};
+ {{class,Type},Tokens} ->
+ %% ObjectClassAssignment
+ {#classdef{pos=L1,name=Name,module=resolve_module(Type),
+ typespec=Type},Tokens}
+ end;
+parse_Assignment([{typereference,_,_},{'{',_}|_]=Tokens) ->
+ %% 1) Type{...} ::= ...
+ %% 2) ValueSet{...} Type ::= ...
+ %% ObjectSet{...} CLASS-NAME ::= CLASS {...}
+ %% 3) CLASS-NAME{...} ::= CLASS {...}
+    %% A parameterized value set and a parameterized object set
+ %% cannot be distinguished from each other without type information.
+ Flist = [fun parse_ParameterizedTypeAssignment/1,
+ fun parse_ParameterizedValueSetTypeAssignment/1,
+ fun parse_ParameterizedObjectClassAssignment/1],
+ parse_or(Tokens, Flist);
+parse_Assignment([{typereference,_,_}|_]=Tokens) ->
+ %% 1) ObjectSet CLASS-NAME ::= ...
+ %% 2) ValueSet Type ::= ...
+ Flist = [fun parse_ObjectSetAssignment/1,
fun parse_ValueSetTypeAssignment/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- {asn1_assignment_error,Reason} ->
- throw({asn1_error,Reason});
- Result ->
- Result
- end.
-
+ parse_or(Tokens, Flist);
+parse_Assignment([#identifier{},{'{',_}|_]=Tokens) ->
+ %% 1) value{...} Type ::= ...
+ %% 2) object{...} CLASS-NAME ::= ...
+ Flist = [fun parse_ParameterizedValueAssignment/1,
+ fun parse_ParameterizedObjectAssignment/1],
+ parse_or(Tokens, Flist);
+parse_Assignment([#identifier{}|_]=Tokens) ->
+ %% 1) value Type ::= ...
+ %% 2) object CLASS-NAME ::= ...
+ Flist = [fun parse_ValueAssignment/1,
+ fun parse_ObjectAssignment/1],
+ parse_or(Tokens, Flist);
+parse_Assignment(Tokens) ->
+ parse_error(Tokens).
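For reference, hand-written ASN.1 fragments (illustrations only, not taken from this patch) of the assignment forms that the clauses above distinguish:

    %% Hypothetical ASN.1 assignments, one per form handled by parse_Assignment/1:
    %%   T ::= INTEGER                         -- TypeAssignment
    %%   C ::= CLASS { &id INTEGER UNIQUE }    -- ObjectClassAssignment
    %%   PT {Param} ::= SEQUENCE { a Param }   -- ParameterizedTypeAssignment
    %%   Vals INTEGER ::= { 1 | 2 | 3 }        -- ValueSetTypeAssignment
    %%   ObjSet C ::= { obj }                  -- ObjectSetAssignment
    %%   v INTEGER ::= 42                      -- ValueAssignment
    %%   obj C ::= { &id 42 }                  -- ObjectAssignment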
parse_or(Tokens,Flist) ->
parse_or(Tokens,Flist,[]).
-parse_or(_Tokens,[],ErrList) ->
- case ErrList of
- [] ->
- throw({asn1_error,{parse_or,ErrList}});
- L when is_list(L) ->
- %% chose to throw 1) the error with the highest line no,
- %% 2) the last error which is not a asn1_assignment_error or
- %% 3) the last error.
- throw(prioritize_error(ErrList))
+parse_or(Tokens, [Fun|Funs], ErrList) when is_function(Fun, 1) ->
+ try Fun(Tokens) of
+ {_,Rest}=Result when is_list(Rest) ->
+ Result
+ catch
+ throw:{asn1_error,Error} ->
+ parse_or(Tokens, Funs, [Error|ErrList])
end;
-parse_or(Tokens,[Fun|Frest],ErrList) ->
- case (catch Fun(Tokens)) of
- Exit = {'EXIT',_Reason} ->
- parse_or(Tokens,Frest,[Exit|ErrList]);
- AsnErr = {asn1_error,_} ->
- parse_or(Tokens,Frest,[AsnErr|ErrList]);
- AsnAssErr = {asn1_assignment_error,_} ->
- parse_or(Tokens,Frest,[AsnAssErr|ErrList]);
- Result = {_,L} when is_list(L) ->
- Result;
- Error ->
- parse_or(Tokens,Frest,[Error|ErrList])
- end.
-
-parse_or_tag(Tokens,Flist) ->
- parse_or_tag(Tokens,Flist,[]).
-
-parse_or_tag(_Tokens,[],ErrList) ->
- case ErrList of
- [] ->
- throw({asn1_error,{parse_or_tag,ErrList}});
- L when is_list(L) ->
- %% chose to throw 1) the error with the highest line no,
- %% 2) the last error which is not a asn1_assignment_error or
- %% 3) the last error.
- throw(prioritize_error(ErrList))
+parse_or(_Tokens, [], ErrList) ->
+ throw({asn1_error,fun() -> prioritize_error(ErrList) end}).
+
+parse_or_tag(Tokens, Flist) ->
+ parse_or_tag(Tokens, Flist, []).
+
+parse_or_tag(Tokens, [{Tag,Fun}|Funs], ErrList) when is_function(Fun, 1) ->
+ try Fun(Tokens) of
+ {Parsed,Rest} when is_list(Rest) ->
+ {{Tag,Parsed},Rest}
+ catch
+ throw:{asn1_error,Error} ->
+ parse_or_tag(Tokens, Funs, [Error|ErrList])
end;
-parse_or_tag(Tokens,[{Tag,Fun}|Frest],ErrList) when is_function(Fun) ->
- case (catch Fun(Tokens)) of
- Exit = {'EXIT',_Reason} ->
- parse_or_tag(Tokens,Frest,[Exit|ErrList]);
- AsnErr = {asn1_error,_} ->
- parse_or_tag(Tokens,Frest,[AsnErr|ErrList]);
- AsnAssErr = {asn1_assignment_error,_} ->
- parse_or_tag(Tokens,Frest,[AsnAssErr|ErrList]);
- {ParseRes,Rest} when is_list(Rest) ->
- {{Tag,ParseRes},Rest};
- Error ->
- parse_or_tag(Tokens,Frest,[Error|ErrList])
- end.
+parse_or_tag(_Tokens, [], ErrList) ->
+ throw({asn1_error,fun() -> prioritize_error(ErrList) end}).
+
+prioritize_error(Errors0) ->
+ Errors1 = prioritize_error_1(Errors0),
+ Errors2 = [{length(L),L} || L <- Errors1],
+ Errors = lists:sort(Errors2),
+ [Res|_] = [L || {_,L} <- Errors],
+ Res.
+
+prioritize_error_1([F|T]) when is_function(F, 0) ->
+ [F()|prioritize_error_1(T)];
+prioritize_error_1([{parse_error,Tokens}|T]) ->
+ [Tokens|prioritize_error_1(T)];
+prioritize_error_1([]) ->
+ [].
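prioritize_error/1 keeps, for each failed alternative, the tokens that remained when it gave up, and reports the alternative with the fewest tokens left, i.e. the parse that got furthest. A small made-up illustration:

    %% Hypothetical error terms: the second alternative consumed more input,
    %% so its shorter remaining-token list is the one reported.
    %%   prioritize_error([{parse_error,[{'::=',3},{'INTEGER',3},{'$end',3}]},
    %%                     {parse_error,[{'$end',3}]}])
    %%   => [{'$end',3}]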
-parse_TypeAssignment([{typereference,L1,Tref},{'::=',_}|Rest]) ->
- {Type,Rest2} = parse_Type(Rest),
- {#typedef{pos=L1,name=Tref,typespec=Type},Rest2};
-parse_TypeAssignment([H1,H2|_Rest]) ->
- throw({asn1_assignment_error,{get_line(H1),get(asn1_module),
- [got,[get_token(H1),get_token(H2)], expected,
- typereference,'::=']}});
-parse_TypeAssignment([H|_T]) ->
- throw({asn1_assignment_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,
- typereference]}}).
%% parse_Type(Tokens) -> Ret
%%
@@ -370,9 +360,8 @@ parse_TypeAssignment([H|_T]) ->
%%
parse_Type(Tokens) ->
{Tag,Rest3} = case Tokens of
- [Lbr= {'[',_}|Rest] ->
- parse_Tag([Lbr|Rest]);
- Rest-> {[],Rest}
+ [{'[',_}|_] -> parse_Tag(Tokens);
+ _ -> {[],Tokens}
end,
{Tag2,Rest4} = case Rest3 of
[{'IMPLICIT',_}|Rest31] when is_record(Tag,tag)->
@@ -384,31 +373,17 @@ parse_Type(Tokens) ->
Rest31 ->
{Tag,Rest31}
end,
- Flist = [fun parse_BuiltinType/1,fun parse_ReferencedType/1,fun parse_TypeWithConstraint/1],
- {Type,Rest5} = case (catch parse_or(Rest4,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_Reason} ->
- throw(AsnErr);
- Result ->
- Result
- end,
- case hd(Rest5) of
- {'(',_} ->
+ Flist = [fun parse_BuiltinType/1,
+ fun parse_ReferencedType/1,
+ fun parse_TypeWithConstraint/1],
+ {Type,Rest5} = parse_or(Rest4, Flist),
+ case Rest5 of
+ [{'(',_}|_] ->
{Constraints,Rest6} = parse_Constraints(Rest5),
- if is_record(Type,type) ->
- {Type#type{constraint=merge_constraints(Constraints),
- tag=Tag2},Rest6};
- true ->
- {#type{def=Type,constraint=merge_constraints(Constraints),
- tag=Tag2},Rest6}
- end;
- _ ->
- if is_record(Type,type) ->
- {Type#type{tag=Tag2},Rest5};
- true ->
- {#type{def=Type,tag=Tag2},Rest5}
- end
+ {Type#type{tag=Tag2,
+ constraint=merge_constraints(Constraints)},Rest6};
+ [_|_] ->
+ {Type#type{tag=Tag2},Rest5}
end.
parse_BuiltinType([{'BIT',_},{'STRING',_}|Rest]) ->
@@ -419,11 +394,10 @@ parse_BuiltinType([{'BIT',_},{'STRING',_}|Rest]) ->
[{'}',_}|Rest4] ->
{#type{def={'BIT STRING',NamedNumberList}},Rest4};
_ ->
- throw({asn1_error,{get_line(hd(Rest3)),get(asn1_module),
- [got,get_token(hd(Rest3)),expected,'}']}})
+ parse_error(Rest3)
end;
_ ->
- {{'BIT STRING',[]},Rest}
+ {#type{def={'BIT STRING',[]}},Rest}
end;
parse_BuiltinType([{'BOOLEAN',_}|Rest]) ->
{#type{def='BOOLEAN'},Rest};
@@ -435,41 +409,33 @@ parse_BuiltinType([{'CHARACTER',_},{'STRING',_}|Rest]) ->
{#type{def='CHARACTER STRING'},Rest};
parse_BuiltinType([{'CHOICE',_},{'{',_}|Rest]) ->
- {AlternativeTypeLists,Rest2} = parse_AlternativeTypeLists(Rest),
- AlternativeTypeLists1 =
- lists:filter(fun(#'ExtensionAdditionGroup'{}) -> false;
- ('ExtensionAdditionGroupEnd') -> false;
- (_) -> true
- end,AlternativeTypeLists),
+ {L0,Rest2} = parse_AlternativeTypeLists(Rest),
case Rest2 of
[{'}',_}|Rest3] ->
- AlternativeTypeLists2 =
- case {[Ext||Ext = #'EXTENSIONMARK'{} <- AlternativeTypeLists1],
- get(extensiondefault)} of
- {[],'IMPLIED'} -> AlternativeTypeLists1 ++ [#'EXTENSIONMARK'{}];
- _ -> AlternativeTypeLists1
+ NeedExt = not lists:keymember('EXTENSIONMARK', 1, L0) andalso
+ get(extensiondefault) =:= 'IMPLIED',
+ L = case NeedExt of
+ true ->
+ L0 ++ [#'EXTENSIONMARK'{}];
+ false ->
+ L0
end,
-
- {#type{def={'CHOICE',AlternativeTypeLists2}},Rest3};
+ {#type{def={'CHOICE',L}},Rest3};
_ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'}']}})
+ parse_error(Rest2)
end;
parse_BuiltinType([{'EMBEDDED',_},{'PDV',_}|Rest]) ->
{#type{def='EMBEDDED PDV'},Rest};
parse_BuiltinType([{'ENUMERATED',_},{'{',_}|Rest]) ->
- {Enumerations,Rest2} = parse_Enumerations(Rest,get(extensiondefault)),
+ {Enumerations,Rest2} = parse_Enumerations(Rest),
case Rest2 of
[{'}',_}|Rest3] ->
{#type{def={'ENUMERATED',Enumerations}},Rest3};
_ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'}']}})
+ parse_error(Rest2)
end;
parse_BuiltinType([{'EXTERNAL',_}|Rest]) ->
{#type{def='EXTERNAL'},Rest};
-
-% InstanceOfType
parse_BuiltinType([{'INSTANCE',_},{'OF',_}|Rest]) ->
{DefinedObjectClass,Rest2} = parse_DefinedObjectClass(Rest),
case Rest2 of
@@ -480,9 +446,6 @@ parse_BuiltinType([{'INSTANCE',_},{'OF',_}|Rest]) ->
_ ->
{#type{def={'INSTANCE OF',DefinedObjectClass,[]}},Rest2}
end;
-
-% parse_BuiltinType(Tokens) ->
-
parse_BuiltinType([{'INTEGER',_}|Rest]) ->
case Rest of
[{'{',_}|Rest2] ->
@@ -491,17 +454,13 @@ parse_BuiltinType([{'INTEGER',_}|Rest]) ->
[{'}',_}|Rest4] ->
{#type{def={'INTEGER',NamedNumberList}},Rest4};
_ ->
- throw({asn1_error,{get_line(hd(Rest3)),get(asn1_module),
- [got,get_token(hd(Rest3)),expected,'}']}})
+ parse_error(Rest3)
end;
_ ->
{#type{def='INTEGER'},Rest}
end;
parse_BuiltinType([{'NULL',_}|Rest]) ->
{#type{def='NULL'},Rest};
-
-% ObjectClassFieldType fix me later
-
parse_BuiltinType([{'OBJECT',_},{'IDENTIFIER',_}|Rest]) ->
{#type{def='OBJECT IDENTIFIER'},Rest};
parse_BuiltinType([{'OCTET',_},{'STRING',_}|Rest]) ->
@@ -529,18 +488,14 @@ parse_BuiltinType([{'SEQUENCE',_},{'{',_},{'...',Line},{'!',_}|Rest]) ->
parse_ComponentTypeLists2(Rest2,[#'EXTENSIONMARK'{pos=Line}]),
case Rest3 of
[{'}',_}|Rest4] ->
- {#type{def=#'SEQUENCE'{components=ComponentTypeLists}},Rest4};
+ {#type{def=#'SEQUENCE'{components=ComponentTypeLists}},Rest4};
_ ->
- throw({asn1_error,{get_line(hd(Rest3)),get(asn1_module),
- [got,get_token(hd(Rest3)),expected,'}']}})
+ parse_error(Rest3)
end
-% _ -> % Seq case 4,17-19,23-26 will fail here
-% throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
-% [got,get_token(hd(Rest2)),expected,'}']}})
end;
parse_BuiltinType([{'SEQUENCE',_},{'{',_}|Rest]) ->
{ComponentTypeLists,Rest2} = parse_ComponentTypeLists(Rest),
- case Rest2 of
+ case Rest2 of
[{'}',_}|Rest3] ->
ComponentTypeLists2 =
case {[Ext||Ext = #'EXTENSIONMARK'{} <- ComponentTypeLists],
@@ -551,25 +506,19 @@ parse_BuiltinType([{'SEQUENCE',_},{'{',_}|Rest]) ->
{#type{def=#'SEQUENCE'{components = ComponentTypeLists2}},
Rest3};
_ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'}']}})
+ parse_error(Rest2)
end;
-
-parse_BuiltinType([{'SEQUENCE',_},{'OF',_},Id={identifier,_,_},Lt={'<',_}|Rest]) ->
-%% TODO: take care of the identifier for something useful
- {Type,Rest2} = parse_SelectionType([Id,Lt|Rest]),
- {#type{def={'SEQUENCE OF',#type{def=Type,tag=[]}}},Rest2};
-
-parse_BuiltinType([{'SEQUENCE',_},{'OF',_},{identifier,_,_} |Rest]) ->
+parse_BuiltinType([{'SEQUENCE',_},{'OF',_}|
+ [#identifier{},{'<',_}|_]=Tokens0]) ->
+ {Type,Tokens} = parse_SelectionType(Tokens0),
+ {#type{def={'SEQUENCE OF',Type}},Tokens};
+parse_BuiltinType([{'SEQUENCE',_},{'OF',_},#identifier{} |Rest]) ->
%% TODO: take care of the identifier for something useful
{Type,Rest2} = parse_Type(Rest),
{#type{def={'SEQUENCE OF',Type}},Rest2};
-
parse_BuiltinType([{'SEQUENCE',_},{'OF',_}|Rest]) ->
{Type,Rest2} = parse_Type(Rest),
{#type{def={'SEQUENCE OF',Type}},Rest2};
-
-
parse_BuiltinType([{'SET',_},{'{',_},{'...',Line},{'}',_}|Rest]) ->
{#type{def=#'SET'{components=[#'EXTENSIONMARK'{pos = Line}]}},Rest};
parse_BuiltinType([{'SET',_},{'{',_},{'...',Line},{'!',_}|Rest]) ->
@@ -581,12 +530,18 @@ parse_BuiltinType([{'SET',_},{'{',_},{'...',Line},{'!',_}|Rest]) ->
val = ExceptionIdentification}]}},
Rest3};
_ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'}']}})
+ {ComponentTypeLists,Rest3}=
+ parse_ComponentTypeLists2(Rest2,[#'EXTENSIONMARK'{pos=Line}]),
+ case Rest3 of
+ [{'}',_}|Rest4] ->
+ {#type{def=#'SET'{components=ComponentTypeLists}},Rest4};
+ _ ->
+ parse_error(Rest3)
+ end
end;
parse_BuiltinType([{'SET',_},{'{',_}|Rest]) ->
{ComponentTypeLists,Rest2} = parse_ComponentTypeLists(Rest),
- case Rest2 of
+ case Rest2 of
[{'}',_}|Rest3] ->
ComponentTypeLists2 =
case {[Ext||Ext = #'EXTENSIONMARK'{} <- ComponentTypeLists],
@@ -597,184 +552,128 @@ parse_BuiltinType([{'SET',_},{'{',_}|Rest]) ->
{#type{def=#'SET'{components = ComponentTypeLists2}},
Rest3};
_ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'}']}})
+ parse_error(Rest2)
end;
-
-parse_BuiltinType([{'SET',_},{'OF',_},Id={identifier,_,_},Lt={'<',_}|Rest]) ->
-%% TODO: take care of the identifier for something useful
- {Type,Rest2} = parse_SelectionType([Id,Lt|Rest]),
- {#type{def={'SET OF',#type{def=Type,tag=[]}}},Rest2};
-
-
-parse_BuiltinType([{'SET',_},{'OF',_},{identifier,_,_}|Rest]) ->
+parse_BuiltinType([{'SET',_},{'OF',_}|
+ [#identifier{},{'<',_}|_]=Tokens0]) ->
+ {Type,Tokens} = parse_SelectionType(Tokens0),
+ {#type{def={'SET OF',Type}},Tokens};
+parse_BuiltinType([{'SET',_},{'OF',_},#identifier{}|Rest]) ->
%%TODO: take care of the identifier for something useful
{Type,Rest2} = parse_Type(Rest),
{#type{def={'SET OF',Type}},Rest2};
-
parse_BuiltinType([{'SET',_},{'OF',_}|Rest]) ->
{Type,Rest2} = parse_Type(Rest),
{#type{def={'SET OF',Type}},Rest2};
-
-%% The so called Useful types
parse_BuiltinType([{'GeneralizedTime',_}|Rest]) ->
{#type{def='GeneralizedTime'},Rest};
parse_BuiltinType([{'UTCTime',_}|Rest]) ->
{#type{def='UTCTime'},Rest};
parse_BuiltinType([{'ObjectDescriptor',_}|Rest]) ->
{#type{def='ObjectDescriptor'},Rest};
-
-%% For compatibility with old standard
-parse_BuiltinType([{'ANY',_},{'DEFINED',_},{'BY',_},{identifier,_,Id}|Rest]) ->
+parse_BuiltinType([{'ANY',_},{'DEFINED',_},{'BY',_},#identifier{val=Id}|Rest]) ->
+ %% For compatibility with the old standard.
{#type{def={'ANY_DEFINED_BY',Id}},Rest};
parse_BuiltinType([{'ANY',_}|Rest]) ->
+ %% For compatibility with the old standard.
{#type{def='ANY'},Rest};
-
parse_BuiltinType(Tokens) ->
parse_ObjectClassFieldType(Tokens).
-% throw({asn1_error,unhandled_type}).
-parse_TypeWithConstraint([{'SEQUENCE',_},Lpar = {'(',_}|Rest]) ->
- {Constraint,Rest2} = parse_Constraint([Lpar|Rest]),
+parse_TypeWithConstraint([{'SEQUENCE',_}|[{'(',_}|_]=Rest0]) ->
+ {Constraint,Rest2} = parse_Constraint(Rest0),
Rest4 = case Rest2 of
- [{'OF',_}, {identifier,_,_Id}|Rest3] ->
+ [{'OF',_},#identifier{}|Rest3] ->
%%% TODO: make some use of the identifier, maybe useful in the XML mapping
Rest3;
[{'OF',_}|Rest3] ->
Rest3;
_ ->
- throw({asn1_error,
- {get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'OF']}})
+ parse_error(Rest2)
end,
{Type,Rest5} = parse_Type(Rest4),
{#type{def = {'SEQUENCE OF',Type},
constraint = merge_constraints([Constraint])},Rest5};
-parse_TypeWithConstraint([{'SEQUENCE',_},{'SIZE',_},Lpar = {'(',_}|Rest]) ->
- {Constraint,Rest2} = parse_Constraint([Lpar|Rest]),
+parse_TypeWithConstraint([{'SEQUENCE',_},{'SIZE',_}|[{'(',_}|_]=Rest0]) ->
+ {Constraint,Rest2} = parse_Constraint(Rest0),
#constraint{c=C} = Constraint,
- Constraint2 = Constraint#constraint{c={'SizeConstraint',C}},
+ Constraint2 = Constraint#constraint{c={element_set,{'SizeConstraint',C},
+ none}},
Rest4 = case Rest2 of
- [{'OF',_}, {identifier,_,_Id}|Rest3] ->
+ [{'OF',_},#identifier{}|Rest3] ->
%%% TODO: make some use of the identifier, maybe useful in the XML mapping
Rest3;
[{'OF',_}|Rest3] ->
Rest3;
_ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'OF']}})
+ parse_error(Rest2)
end,
{Type,Rest5} = parse_Type(Rest4),
{#type{def = {'SEQUENCE OF',Type}, constraint = merge_constraints([Constraint2])},Rest5};
-parse_TypeWithConstraint([{'SET',_},Lpar = {'(',_}|Rest]) ->
- {Constraint,Rest2} = parse_Constraint([Lpar|Rest]),
+parse_TypeWithConstraint([{'SET',_}|[{'(',_}|_]=Rest0]) ->
+ {Constraint,Rest2} = parse_Constraint(Rest0),
Rest4 = case Rest2 of
- [{'OF',_}, {identifier,_,_Id}|Rest3] ->
+ [{'OF',_},#identifier{}|Rest3] ->
%%% TODO: make some use of the identifier, maybe useful in the XML mapping
Rest3;
[{'OF',_}|Rest3] ->
Rest3;
_ ->
- throw({asn1_error,
- {get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'OF']}})
+ parse_error(Rest2)
end,
{Type,Rest5} = parse_Type(Rest4),
{#type{def = {'SET OF',Type},
constraint = merge_constraints([Constraint])},Rest5};
-parse_TypeWithConstraint([{'SET',_},{'SIZE',_},Lpar = {'(',_}|Rest]) ->
- {Constraint,Rest2} = parse_Constraint([Lpar|Rest]),
+parse_TypeWithConstraint([{'SET',_},{'SIZE',_}|[{'(',_}|_]=Rest0]) ->
+ {Constraint,Rest2} = parse_Constraint(Rest0),
#constraint{c=C} = Constraint,
- Constraint2 = Constraint#constraint{c={'SizeConstraint',C}},
+ Constraint2 = Constraint#constraint{c={element_set,
+ {'SizeConstraint',C},none}},
Rest4 = case Rest2 of
- [{'OF',_}, {identifier,_,_Id}|Rest3] ->
+ [{'OF',_},#identifier{}|Rest3] ->
%%% TODO: make some use of the identifier, maybe useful in the XML mapping
Rest3;
[{'OF',_}|Rest3] ->
Rest3;
_ ->
- throw({asn1_error,
- {get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'OF']}})
+ parse_error(Rest2)
end,
{Type,Rest5} = parse_Type(Rest4),
{#type{def = {'SET OF',Type},
constraint = merge_constraints([Constraint2])},Rest5};
parse_TypeWithConstraint(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- ['SEQUENCE','SEQUENCE SIZE','SET','SET SIZE'],
- followed,by,a,constraint]}}).
+ parse_error(Tokens).
%% --------------------------
parse_ReferencedType(Tokens) ->
- Flist = [fun parse_DefinedType/1,
+ Flist = [fun parse_ParameterizedType/1,
+ fun parse_DefinedType/1,
fun parse_SelectionType/1,
- fun parse_TypeFromObject/1,
- fun parse_ValueSetFromObjects/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
+ fun parse_TypeFromObject/1],
+ parse_or(Tokens, Flist).
-parse_DefinedType(Tokens=[{typereference,_,_},{'{',_}|_Rest]) ->
- parse_ParameterizedType(Tokens);
-parse_DefinedType(Tokens=[{typereference,L1,TypeName},
- T2={typereference,_,_},T3={'{',_}|Rest]) ->
- case (catch parse_ParameterizedType(Tokens)) of
- {'EXIT',_Reason} ->
- Rest2 = [T2,T3|Rest],
- {#type{def = #'Externaltypereference'{pos=L1,
- module=resolve_module(TypeName),
- type=TypeName}},Rest2};
- {asn1_error,_} ->
- Rest2 = [T2,T3|Rest],
- {#type{def = #'Externaltypereference'{pos=L1,
- module=resolve_module(TypeName),
- type=TypeName}},Rest2};
- Result ->
- Result
- end;
-parse_DefinedType(Tokens=[{typereference,_L1,_Module},{'.',_},
- {typereference,_,_TypeName},{'{',_}|_Rest]) ->
- parse_ParameterizedType(Tokens);
-parse_DefinedType([{typereference,L1,Module},{'.',_},{typereference,_,TypeName}|Rest]) ->
- {#type{def = #'Externaltypereference'{pos=L1,module=Module,type=TypeName}},Rest};
-parse_DefinedType([{typereference,L1,TypeName}|Rest]) ->
- case is_pre_defined_class(TypeName) of
- false ->
- {#type{def = #'Externaltypereference'{pos=L1,module=resolve_module(TypeName),
- type=TypeName}},Rest};
- _ ->
- throw({asn1_error,
- {L1,get(asn1_module),
- [got,TypeName,expected,
- [typereference,'typereference.typereference',
- 'typereference typereference']]}})
- end;
+parse_DefinedType([{typereference,L1,Module},
+ {'.',_},
+ {typereference,_,TypeName}|Tokens]) ->
+ {#type{def = #'Externaltypereference'{pos=L1,module=Module,
+ type=TypeName}},Tokens};
+parse_DefinedType([{typereference,_,_}=Tr|Tokens]) ->
+ {#type{def=tref2Exttref(Tr)},Tokens};
parse_DefinedType(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- [typereference,'typereference.typereference',
- 'typereference typereference']]}}).
+ parse_error(Tokens).
-parse_SelectionType([{identifier,_,Name},{'<',_}|Rest]) ->
+parse_SelectionType([#identifier{val=Name},{'<',_}|Rest]) ->
{Type,Rest2} = parse_Type(Rest),
- {{'SelectionType',Name,Type},Rest2};
+ {#type{def={'SelectionType',Name,Type}},Rest2};
parse_SelectionType(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,'identifier <']}}).
+ parse_error(Tokens).
resolve_module(Type) ->
@@ -787,30 +686,13 @@ resolve_module(_Type, Current, undefined) ->
resolve_module(Type, Current, Imports) ->
case [Mod || #'SymbolsFromModule'{symbols = S, module = Mod} <- Imports,
#'Externaltypereference'{type = T} <- S,
- Type == T] of
+ Type =:= T] of
[#'Externaltypereference'{type = Mod}|_] -> Mod;
%% This allows the same symbol to be imported several times
%% which ought to be checked elsewhere and flagged as an error
[] -> Current
end.
-%% --------------------------
-
-
-%% This should probably be removed very soon
-% parse_ConstrainedType(Tokens) ->
-% case (catch parse_TypeWithConstraint(Tokens)) of
-% {'EXIT',Reason} ->
-% {Type,Rest} = parse_Type(Tokens),
-% {Constraint,Rest2} = parse_Constraint(Rest),
-% {Type#type{constraint=Constraint},Rest2};
-% {asn1_error,Reason2} ->
-% {Type,Rest} = parse_Type(Tokens),
-% {Constraint,Rest2} = parse_Constraint(Rest),
-% {Type#type{constraint=Constraint},Rest2};
-% Result ->
-% Result
-% end.
parse_Constraints(Tokens) ->
parse_Constraints(Tokens,[]).
@@ -819,9 +701,9 @@ parse_Constraints(Tokens,Acc) ->
{Constraint,Rest} = parse_Constraint(Tokens),
case Rest of
[{'(',_}|_Rest2] ->
- parse_Constraints(Rest,[Constraint|Acc]);
+ parse_Constraints(Rest, [Constraint|Acc]);
_ ->
- {lists:reverse([Constraint|Acc]),Rest}
+ {lists:reverse(Acc, [Constraint]),Rest}
end.
parse_Constraint([{'(',_}|Rest]) ->
@@ -830,46 +712,27 @@ parse_Constraint([{'(',_}|Rest]) ->
case Rest3 of
[{')',_}|Rest4] ->
{#constraint{c=Constraint,e=Exception},Rest4};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,')']}})
- end;
-parse_Constraint(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,'(']}}).
+ [_|_] ->
+ parse_error(Rest3)
+ end.
parse_ConstraintSpec(Tokens) ->
Flist = [fun parse_GeneralConstraint/1,
fun parse_SubtypeConstraint/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- {asn1_error,Reason2} ->
- throw({asn1_error,Reason2});
- Result ->
- Result
- end.
+ parse_or(Tokens, Flist).
parse_ExceptionSpec([LPar={')',_}|Rest]) ->
{undefined,[LPar|Rest]};
parse_ExceptionSpec([{'!',_}|Rest]) ->
parse_ExceptionIdentification(Rest);
parse_ExceptionSpec(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,[')','!']]}}).
+ parse_error(Tokens).
parse_ExceptionIdentification(Tokens) ->
Flist = [fun parse_SignedNumber/1,
fun parse_DefinedValue/1,
fun parse_TypeColonValue/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- {asn1_error,Reason2} ->
- throw({asn1_error,Reason2});
- Result ->
- Result
- end.
+ parse_or(Tokens, Flist).
parse_TypeColonValue(Tokens) ->
{Type,Rest} = parse_Type(Tokens),
@@ -877,32 +740,28 @@ parse_TypeColonValue(Tokens) ->
[{':',_}|Rest2] ->
{Value,Rest3} = parse_Value(Rest2),
{{Type,Value},Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,':']}})
+ [_|_] ->
+ parse_error(Rest)
end.
parse_SubtypeConstraint(Tokens) ->
parse_ElementSetSpecs(Tokens).
-parse_ElementSetSpecs([{'...',_}|Rest]) ->
- {Elements,Rest2} = parse_ElementSetSpec(Rest),
- {{[],Elements},Rest2};
parse_ElementSetSpecs(Tokens) ->
{RootElems,Rest} = parse_ElementSetSpec(Tokens),
case Rest of
[{',',_},{'...',_},{',',_}|Rest2] ->
{AdditionalElems,Rest3} = parse_ElementSetSpec(Rest2),
- {{RootElems,AdditionalElems},Rest3};
+ {{element_set,RootElems,AdditionalElems},Rest3};
[{',',_},{'...',_}|Rest2] ->
- {{RootElems,[]},Rest2};
+ {{element_set,RootElems,empty},Rest2};
_ ->
- {RootElems,Rest}
+ {{element_set,RootElems,none},Rest}
end.
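%% Illustrative sketch, not part of the patch: parse_ElementSetSpecs/1
%% now always returns an {element_set,Root,Additional} triple, where
%% Additional is 'none' (no extension marker), 'empty' (a trailing
%% "...") or the additional elements themselves.  The function name
%% below is made up; it only shows the three shapes a consumer of the
%% new representation can expect to match on.
describe_element_set({element_set,Root,none}) ->
    {not_extensible,Root};
describe_element_set({element_set,Root,empty}) ->
    {extensible_no_additions,Root};
describe_element_set({element_set,Root,Additional}) ->
    {extensible,Root,Additional}.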
parse_ElementSetSpec([{'ALL',_},{'EXCEPT',_}|Rest]) ->
{Exclusions,Rest2} = parse_Elements(Rest),
- {{'ALL',{'EXCEPT',Exclusions}},Rest2};
+ {{'ALL-EXCEPT',Exclusions},Rest2};
parse_ElementSetSpec(Tokens) ->
parse_Unions(Tokens).
@@ -918,14 +777,8 @@ parse_Unions(Tokens) ->
case {InterSec,Unions} of
{InterSec,[]} ->
{InterSec,Rest2};
- {{'SingleValue',V1},{'SingleValue',V2}} ->
- {{'SingleValue',ordsets:union(to_set(V1),to_set(V2))},Rest2};
- {V1,V2} when is_list(V2) ->
- {[V1] ++ [union|V2],Rest2};
{V1,V2} ->
- {[V1,union,V2],Rest2}
-% Other ->
-% throw(Other)
+ {{union,V1,V2},Rest2}
end.
parse_UnionsRec([{'|',_}|Rest]) ->
@@ -934,12 +787,8 @@ parse_UnionsRec([{'|',_}|Rest]) ->
case {InterSec,URec} of
{V1,[]} ->
{V1,Rest3};
- {{'SingleValue',V1},{'SingleValue',V2}} ->
- {{'SingleValue',ordsets:union(to_set(V1),to_set(V2))},Rest3};
- {V1,V2} when is_list(V2) ->
- {[V1] ++ [union|V2],Rest3};
{V1,V2} ->
- {[V1,union,V2],Rest3}
+ {{union,V1,V2},Rest3}
end;
parse_UnionsRec([{'UNION',Info}|Rest]) ->
parse_UnionsRec([{'|',Info}|Rest]);
@@ -952,13 +801,8 @@ parse_Intersections(Tokens) ->
case {InterSec,IRec} of
{V1,[]} ->
{V1,Rest2};
- {{'SingleValue',V1},{'SingleValue',V2}} ->
- {{'SingleValue',
- ordsets:intersection(to_set(V1),to_set(V2))},Rest2};
- {V1,V2} when is_list(V2) ->
- {[V1] ++ [intersection|V2],Rest2};
{V1,V2} ->
- {[V1,intersection,V2],Rest2}
+ {{intersection,V1,V2},Rest2}
end.
%% parse_IElemsRec(Tokens) -> Result
@@ -967,15 +811,10 @@ parse_IElemsRec([{'^',_}|Rest]) ->
{InterSec,Rest2} = parse_IntersectionElements(Rest),
{IRec,Rest3} = parse_IElemsRec(Rest2),
case {InterSec,IRec} of
- {{'SingleValue',V1},{'SingleValue',V2}} ->
- {{'SingleValue',
- ordsets:intersection(to_set(V1),to_set(V2))},Rest3};
{V1,[]} ->
- {V1,Rest3};
- {V1,V2} when is_list(V2) ->
- {[V1] ++ [intersection|V2],Rest3};
+ {V1,Rest2};
{V1,V2} ->
- {[V1,intersection,V2],Rest3}
+ {{intersection,V1,V2},Rest3}
end;
parse_IElemsRec([{'INTERSECTION',Info}|Rest]) ->
parse_IElemsRec([{'^',Info}|Rest]);
@@ -992,7 +831,7 @@ parse_IntersectionElements(Tokens) ->
case Rest of
[{'EXCEPT',_}|Rest2] ->
{Exclusion,Rest3} = parse_Elements(Rest2),
- {{InterSec,{'EXCEPT',Exclusion}},Rest3};
+ {{'EXCEPT',InterSec,Exclusion},Rest3};
Rest ->
{InterSec,Rest}
end.
@@ -1006,102 +845,73 @@ parse_Elements([{'(',_}|Rest]) ->
case Rest2 of
[{')',_}|Rest3] ->
{Elems,Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,')']}})
+ [_|_] ->
+ parse_error(Rest2)
end;
parse_Elements(Tokens) ->
Flist = [fun parse_ObjectSetElements/1,
fun parse_SubtypeElements/1,
-% fun parse_Value/1,
-% fun parse_Type/1,
fun parse_Object/1,
fun parse_DefinedObjectSet/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- Err = {asn1_error,_} ->
- throw(Err);
- Result = {Val,_} when is_record(Val,type) ->
- Result;
-
- Result ->
- Result
- end.
-
-
+ parse_or(Tokens, Flist).
%% --------------------------
-parse_DefinedObjectClass([{typereference,_,_ModName},{'.',_},Tr={typereference,_,_ObjClName}|Rest]) ->
-%% {{objectclassname,ModName,ObjClName},Rest};
-% {{objectclassname,tref2Exttref(Tr)},Rest};
- {tref2Exttref(Tr),Rest};
+parse_DefinedObjectClass([{typereference,_,ModName},{'.',_},
+ {typereference,Pos,Name}|Tokens]) ->
+ Ext = #'Externaltypereference'{pos=Pos,
+ module=ModName,
+ type=Name},
+ {Ext,Tokens};
parse_DefinedObjectClass([Tr={typereference,_,_ObjClName}|Rest]) ->
-% {{objectclassname,tref2Exttref(Tr)},Rest};
{tref2Exttref(Tr),Rest};
parse_DefinedObjectClass(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- ['typereference . typereference',
- typereference,
- 'TYPE-IDENTIFIER',
- 'ABSTRACT-SYNTAX']]}}).
-
-parse_ObjectClassAssignment([{typereference,L1,ObjClName},{'::=',_}|Rest]) ->
- {Type,Rest2} = parse_ObjectClass(Rest),
- {#classdef{pos=L1,name=ObjClName,module=resolve_module(Type),
- typespec=Type},Rest2};
-parse_ObjectClassAssignment(Tokens) ->
- throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- 'typereference ::=']}}).
+ parse_error(Tokens).
parse_ObjectClass(Tokens) ->
- Flist = [fun parse_DefinedObjectClass/1,
- fun parse_ObjectClassDefn/1,
- fun parse_ParameterizedObjectClass/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- {asn1_error,Reason2} ->
- throw({asn1_error,Reason2});
- Result ->
- Result
- end.
+ Flist = [fun parse_ObjectClassDefn/1,
+ fun parse_DefinedObjectClass/1],
+ parse_or(Tokens, Flist).
parse_ObjectClassDefn([{'CLASS',_},{'{',_}|Rest]) ->
{Type,Rest2} = parse_FieldSpec(Rest),
{WithSyntaxSpec,Rest3} = parse_WithSyntaxSpec(Rest2),
{#objectclass{fields=Type,syntax=WithSyntaxSpec},Rest3};
parse_ObjectClassDefn(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,'CLASS {']}}).
+ parse_error(Tokens).
parse_FieldSpec(Tokens) ->
parse_FieldSpec(Tokens,[]).
-parse_FieldSpec(Tokens,Acc) ->
- Flist = [fun parse_FixedTypeValueFieldSpec/1,
- fun parse_VariableTypeValueFieldSpec/1,
- fun parse_ObjectFieldSpec/1,
- fun parse_FixedTypeValueSetFieldSpec/1,
- fun parse_VariableTypeValueSetFieldSpec/1,
- fun parse_TypeFieldSpec/1,
- fun parse_ObjectSetFieldSpec/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
+parse_FieldSpec(Tokens0, Acc) ->
+ Fl = case Tokens0 of
+ [{valuefieldreference,_,_}|_] ->
+ %% 1) &field Type
+ %% &object CLASS-NAME
+ %% 2) &field &FieldName
+ %% A fixed type field cannot be distinguished from
+ %% an object field without type information.
+ [fun parse_FixedTypeValueFieldSpec/1,
+ fun parse_VariableTypeValueFieldSpec/1];
+ [{typefieldreference,_,_}|_] ->
+ %% 1) &Set Type
+ %% &ObjectSet CLASS-NAME
+ %% 2) &Set &FieldName
+ %% 3) &Type
+ %% A value set and an object set cannot be distinguished
+ %% without type information.
+ [fun parse_FixedTypeValueSetFieldSpec/1,
+ fun parse_VariableTypeValueSetFieldSpec/1,
+ fun parse_TypeFieldSpec/1];
+ [_|_] ->
+ parse_error(Tokens0)
+ end,
+ case parse_or(Tokens0, Fl) of
{Type,[{'}',_}|Rest]} ->
- {lists:reverse([Type|Acc]),Rest};
+ {lists:reverse(Acc, [Type]),Rest};
{Type,[{',',_}|Rest2]} ->
- parse_FieldSpec(Rest2,[Type|Acc]);
- {_,[H|_T]} ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'}']}})
+ parse_FieldSpec(Rest2, [Type|Acc])
end.
parse_PrimitiveFieldName([{typefieldreference,_,FieldName}|Rest]) ->
@@ -1109,27 +919,19 @@ parse_PrimitiveFieldName([{typefieldreference,_,FieldName}|Rest]) ->
parse_PrimitiveFieldName([{valuefieldreference,_,FieldName}|Rest]) ->
{{valuefieldreference,FieldName},Rest};
parse_PrimitiveFieldName(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- [typefieldreference,valuefieldreference]]}}).
+ parse_error(Tokens).
parse_FieldName(Tokens) ->
{Field,Rest} = parse_PrimitiveFieldName(Tokens),
parse_FieldName(Rest,[Field]).
-parse_FieldName([{'.',_}|Rest],Acc) ->
- case (catch parse_PrimitiveFieldName(Rest)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- {FieldName,Rest2} ->
- parse_FieldName(Rest2,[FieldName|Acc])
- end;
-parse_FieldName(Tokens,Acc) ->
+parse_FieldName([{'.',_}|Rest0],Acc) ->
+ {FieldName,Rest1} = parse_PrimitiveFieldName(Rest0),
+ parse_FieldName(Rest1, [FieldName|Acc]);
+parse_FieldName(Tokens, Acc) ->
{lists:reverse(Acc),Tokens}.
-parse_FixedTypeValueFieldSpec([{valuefieldreference,L1,VFieldName}|Rest]) ->
+parse_FixedTypeValueFieldSpec([{valuefieldreference,_,VFieldName}|Rest]) ->
{Type,Rest2} = parse_Type(Rest),
{Unique,Rest3} =
case Rest2 of
@@ -1139,109 +941,61 @@ parse_FixedTypeValueFieldSpec([{valuefieldreference,L1,VFieldName}|Rest]) ->
{undefined,Rest2}
end,
{OptionalitySpec,Rest5} = parse_ValueOptionalitySpec(Rest3),
- case {Unique,Rest5} of
- {'UNIQUE',[{Del,_}|_]} when Del =:= ','; Del =:= '}' ->
- case OptionalitySpec of
- {'DEFAULT',_} ->
- throw({asn1_error,
- {L1,get(asn1_module),
- ['UNIQUE and DEFAULT in same field',VFieldName]}});
- _ ->
- {{fixedtypevaluefield,VFieldName,Type,Unique,OptionalitySpec},Rest5}
- end;
- {_,[{Del,_}|_]} when Del =:= ','; Del =:= '}' ->
- {{object_or_fixedtypevalue_field,VFieldName,Type,Unique,OptionalitySpec},Rest5};
- _ ->
- throw({asn1_error,{L1,get(asn1_module),
- [got,get_token(hd(Rest5)),expected,[',','}']]}})
- end;
-parse_FixedTypeValueFieldSpec(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,valuefieldreference]}}).
+ case is_end_delimiter(Rest5) of
+ false -> parse_error(Rest5);
+ true -> ok
+ end,
+ Tag = case Unique of
+ 'UNIQUE' -> fixedtypevaluefield;
+ _ -> object_or_fixedtypevalue_field
+ end,
+ {{Tag,VFieldName,Type,Unique,OptionalitySpec},Rest5}.
+
+parse_VariableTypeValueFieldSpec([{valuefieldreference,_,VFieldName}|Rest0]) ->
+ {FieldRef,Rest1} = parse_FieldName(Rest0),
+ {OptionalitySpec,Rest} = parse_ValueOptionalitySpec(Rest1),
+ case is_end_delimiter(Rest) of
+ true ->
+ {{variabletypevaluefield,VFieldName,FieldRef,OptionalitySpec},
+ Rest};
+ false ->
+ parse_error(Rest)
+ end.
-parse_VariableTypeValueFieldSpec([{valuefieldreference,L,VFieldName}|Rest]) ->
- {FieldRef,Rest2} = parse_FieldName(Rest),
- {OptionalitySpec,Rest3} = parse_ValueOptionalitySpec(Rest2),
- case Rest3 of
- [{Del,_}|_] when Del =:= ','; Del =:= '}' ->
- {{variabletypevaluefield,VFieldName,FieldRef,OptionalitySpec},Rest3};
- _ ->
- throw({asn1_error,{L,get(asn1_module),
- [got,get_token(hd(Rest3)),expected,[',','}']]}})
- end;
-parse_VariableTypeValueFieldSpec(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,valuefieldreference]}}).
+parse_TypeFieldSpec([{typefieldreference,_,Name}|Rest0]) ->
+ {OptionalitySpec,Rest} = parse_TypeOptionalitySpec(Rest0),
+ case is_end_delimiter(Rest) of
+ true ->
+ {{typefield,Name,OptionalitySpec},Rest};
+ false ->
+ parse_error(Rest)
+ end.
-parse_ObjectFieldSpec([{valuefieldreference,L,VFieldName}|Rest]) ->
- {Class,Rest2} = parse_DefinedObjectClass(Rest),
- {OptionalitySpec,Rest3} = parse_ObjectOptionalitySpec(Rest2),
- case Rest3 of
- [{Del,_}|_] when Del =:= ','; Del =:= '}' ->
- {{objectfield,VFieldName,Class,undefined,OptionalitySpec},Rest3};
- _ ->
- throw({asn1_error,{L,get(asn1_module),
- [got,get_token(hd(Rest3)),expected,[',','}']]}})
- end;
-parse_ObjectFieldSpec(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,valuefieldreference]}}).
+parse_FixedTypeValueSetFieldSpec([{typefieldreference,_,Name}|Rest0]) ->
+ {Type,Rest1} = parse_Type(Rest0),
+ {OptionalitySpec,Rest} = parse_ValueSetOptionalitySpec(Rest1),
+ case is_end_delimiter(Rest) of
+ true ->
+ {{objectset_or_fixedtypevalueset_field,Name,Type,
+ OptionalitySpec},Rest};
+ false ->
+ parse_error(Rest)
+ end.
-parse_TypeFieldSpec([{typefieldreference,L,TFieldName}|Rest]) ->
- {OptionalitySpec,Rest2} = parse_TypeOptionalitySpec(Rest),
- case Rest2 of
- [{Del,_}|_] when Del =:= ','; Del =:= '}' ->
- {{typefield,TFieldName,OptionalitySpec},Rest2};
- _ ->
- throw({asn1_error,{L,get(asn1_module),
- [got,get_token(hd(Rest2)),expected,[',','}']]}})
- end;
-parse_TypeFieldSpec(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,typefieldreference]}}).
+parse_VariableTypeValueSetFieldSpec([{typefieldreference,_,Name}|Rest0]) ->
+ {FieldRef,Rest1} = parse_FieldName(Rest0),
+ {OptionalitySpec,Rest} = parse_ValueSetOptionalitySpec(Rest1),
+ case is_end_delimiter(Rest) of
+ true ->
+ {{variabletypevaluesetfield,Name,FieldRef,OptionalitySpec},
+ Rest};
+ false ->
+ parse_error(Rest)
+ end.
-parse_FixedTypeValueSetFieldSpec([{typefieldreference,L,TFieldName}|Rest]) ->
- {Type,Rest2} = parse_Type(Rest),
- {OptionalitySpec,Rest3} = parse_ValueSetOptionalitySpec(Rest2),
- case Rest3 of
- [{Del,_}|_] when Del =:= ','; Del =:= '}' ->
- {{objectset_or_fixedtypevalueset_field,TFieldName,Type,
- OptionalitySpec},Rest3};
- _ ->
- throw({asn1_error,{L,get(asn1_module),
- [got,get_token(hd(Rest3)),expected,[',','}']]}})
- end;
-parse_FixedTypeValueSetFieldSpec(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,typefieldreference]}}).
-
-parse_VariableTypeValueSetFieldSpec([{typefieldreference,L,TFieldName}|Rest]) ->
- {FieldRef,Rest2} = parse_FieldName(Rest),
- {OptionalitySpec,Rest3} = parse_ValueSetOptionalitySpec(Rest2),
- case Rest3 of
- [{Del,_}|_] when Del =:= ','; Del =:= '}' ->
- {{variabletypevaluesetfield,TFieldName,FieldRef,OptionalitySpec},Rest3};
- _ ->
- throw({asn1_error,{L,get(asn1_module),
- [got,get_token(hd(Rest3)),expected,[',','}']]}})
- end;
-parse_VariableTypeValueSetFieldSpec(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,typefieldreference]}}).
-
-parse_ObjectSetFieldSpec([{typefieldreference,L,TFieldName}|Rest]) ->
- {Class,Rest2} = parse_DefinedObjectClass(Rest),
- {OptionalitySpec,Rest3} = parse_ObjectSetOptionalitySpec(Rest2),
- case Rest3 of
- [{Del,_}|_] when Del =:= ','; Del =:= '}' ->
- {{objectsetfield,TFieldName,Class,OptionalitySpec},Rest3};
- _ ->
- throw({asn1_error,{L,get(asn1_module),
- [got,get_token(hd(Rest3)),expected,[',','}']]}})
- end;
-parse_ObjectSetFieldSpec(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,typefieldreference]}}).
+is_end_delimiter([{',',_}|_]) -> true;
+is_end_delimiter([{'}',_}|_]) -> true;
+is_end_delimiter([_|_]) -> false.
parse_ValueOptionalitySpec(Tokens)->
case Tokens of
@@ -1252,15 +1006,6 @@ parse_ValueOptionalitySpec(Tokens)->
_ -> {'MANDATORY',Tokens}
end.
-parse_ObjectOptionalitySpec(Tokens) ->
- case Tokens of
- [{'OPTIONAL',_}|Rest] -> {'OPTIONAL',Rest};
- [{'DEFAULT',_}|Rest] ->
- {Object,Rest2} = parse_Object(Rest),
- {{'DEFAULT',Object},Rest2};
- _ -> {'MANDATORY',Tokens}
- end.
-
parse_TypeOptionalitySpec(Tokens) ->
case Tokens of
[{'OPTIONAL',_}|Rest] -> {'OPTIONAL',Rest};
@@ -1279,65 +1024,44 @@ parse_ValueSetOptionalitySpec(Tokens) ->
_ -> {'MANDATORY',Tokens}
end.
-parse_ObjectSetOptionalitySpec(Tokens) ->
- case Tokens of
- [{'OPTIONAL',_}|Rest] -> {'OPTIONAL',Rest};
- [{'DEFAULT',_}|Rest] ->
- {ObjectSet,Rest2} = parse_ObjectSet(Rest),
- {{'DEFAULT',ObjectSet},Rest2};
- _ -> {'MANDATORY',Tokens}
- end.
-
parse_WithSyntaxSpec([{'WITH',_},{'SYNTAX',_}|Rest]) ->
{SyntaxList,Rest2} = parse_SyntaxList(Rest),
{{'WITH SYNTAX',SyntaxList},Rest2};
parse_WithSyntaxSpec(Tokens) ->
{[],Tokens}.
-parse_SyntaxList([{'{',_},{'}',_}|Rest]) ->
- {[],Rest};
parse_SyntaxList([{'{',_}|Rest]) ->
parse_SyntaxList(Rest,[]);
parse_SyntaxList(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,['{}','{']]}}).
+ parse_error(Tokens).
-parse_SyntaxList(Tokens,Acc) ->
+parse_SyntaxList(Tokens, Acc) ->
{SyntaxList,Rest} = parse_TokenOrGroupSpec(Tokens),
case Rest of
[{'}',_}|Rest2] ->
- {lists:reverse([SyntaxList|Acc]),Rest2};
+ {lists:reverse(Acc, [SyntaxList]),Rest2};
_ ->
- parse_SyntaxList(Rest,[SyntaxList|Acc])
+ parse_SyntaxList(Rest, [SyntaxList|Acc])
end.
parse_TokenOrGroupSpec(Tokens) ->
Flist = [fun parse_RequiredToken/1,
fun parse_OptionalGroup/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
+ parse_or(Tokens, Flist).
-parse_RequiredToken([{typereference,L1,WordName}|Rest]) ->
+parse_RequiredToken([{typereference,_,WordName}|Rest]=Tokens) ->
case is_word(WordName) of
false ->
- throw({asn1_error,{L1,get(asn1_module),
- [got,WordName,expected,a,'Word']}});
+ parse_error(Tokens);
true ->
{WordName,Rest}
end;
parse_RequiredToken([{',',L1}|Rest]) ->
{{',',L1},Rest};
-parse_RequiredToken([{WordName,L1}|Rest]) ->
+parse_RequiredToken([{WordName,_}|Rest]=Tokens) ->
case is_word(WordName) of
false ->
- throw({asn1_error,{L1,get(asn1_module),
- [got,WordName,expected,a,'Word']}});
+ parse_error(Tokens);
true ->
{WordName,Rest}
end;
@@ -1347,7 +1071,9 @@ parse_RequiredToken(Tokens) ->
parse_OptionalGroup([{'[',_}|Rest]) ->
{Spec,Rest2} = parse_TokenOrGroupSpec(Rest),
{SpecList,Rest3} = parse_OptionalGroup(Rest2,[Spec]),
- {SpecList,Rest3}.
+ {SpecList,Rest3};
+parse_OptionalGroup(Tokens) ->
+ parse_error(Tokens).
parse_OptionalGroup([{']',_}|Rest],Acc) ->
{lists:reverse(Acc),Rest};
@@ -1355,82 +1081,55 @@ parse_OptionalGroup(Tokens,Acc) ->
{Spec,Rest} = parse_TokenOrGroupSpec(Tokens),
parse_OptionalGroup(Rest,[Spec|Acc]).
-parse_DefinedObject([Id={identifier,_,_ObjName}|Rest]) ->
+parse_DefinedObject([#identifier{}=Id|Rest]) ->
{{object,identifier2Extvalueref(Id)},Rest};
-parse_DefinedObject([{typereference,L1,ModName},{'.',_},{identifier,_,ObjName}|Rest]) ->
+parse_DefinedObject([{typereference,L1,ModName},{'.',_},#identifier{val=ObjName}|Rest]) ->
{{object, #'Externaltypereference'{pos=L1,module=ModName,type=ObjName}},Rest};
parse_DefinedObject(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- [identifier,'typereference.identifier']]}}).
+ parse_error(Tokens).
-parse_ObjectAssignment([{identifier,L1,ObjName}|Rest]) ->
+parse_ObjectAssignment([#identifier{pos=L1,val=ObjName}|Rest]) ->
{Class,Rest2} = parse_DefinedObjectClass(Rest),
case Rest2 of
[{'::=',_}|Rest3] ->
{Object,Rest4} = parse_Object(Rest3),
{#typedef{pos=L1,name=ObjName,
typespec=#'Object'{classname=Class,def=Object}},Rest4};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'::=']}});
- Other ->
- throw({asn1_error,{L1,get(asn1_module),
- [got,Other,expected,'::=']}})
- end;
-parse_ObjectAssignment(Tokens) ->
- throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,identifier]}}).
-
+ _ ->
+ parse_error(Rest2)
+ end.
%% parse_Object(Tokens) -> Ret
%% Tokens = [Tok]
%% Tok = tuple()
%% Ret = {object,_} | {object, _, _}
parse_Object(Tokens) ->
- Flist=[fun parse_ObjectDefn/1,
- fun parse_ObjectFromObject/1,
- fun parse_ParameterizedObject/1,
- fun parse_DefinedObject/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
+ %% The ObjectFromObject production is not included here,
+ %% since it will have been caught by the ValueFromObject
+ %% before we reach this point.
+ Flist = [fun parse_ObjectDefn/1,
+ fun parse_DefinedObject/1],
+ parse_or(Tokens, Flist).
parse_ObjectDefn(Tokens) ->
Flist=[fun parse_DefaultSyntax/1,
fun parse_DefinedSyntax/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
+ parse_or(Tokens, Flist).
-parse_DefaultSyntax([{'{',_},{'}',_}|Rest]) ->
- {{object,defaultsyntax,[]},Rest};
parse_DefaultSyntax([{'{',_}|Rest]) ->
parse_DefaultSyntax(Rest,[]);
parse_DefaultSyntax(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,['{}','{']]}}).
+ parse_error(Tokens).
-parse_DefaultSyntax(Tokens,Acc) ->
+parse_DefaultSyntax(Tokens, Acc) ->
{Setting,Rest} = parse_FieldSetting(Tokens),
case Rest of
[{',',_}|Rest2] ->
parse_DefaultSyntax(Rest2,[Setting|Acc]);
[{'}',_}|Rest3] ->
- {{object,defaultsyntax,lists:reverse([Setting|Acc])},Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,[',','}']]}})
+ {{object,defaultsyntax,lists:reverse(Acc, [Setting])},Rest3};
+ _ ->
+ parse_error(Rest)
end.
parse_FieldSetting(Tokens) ->
@@ -1439,7 +1138,9 @@ parse_FieldSetting(Tokens) ->
{{PrimFieldName,Setting},Rest2}.
parse_DefinedSyntax([{'{',_}|Rest]) ->
- parse_DefinedSyntax(Rest,[]).
+ parse_DefinedSyntax(Rest, []);
+parse_DefinedSyntax(Tokens) ->
+ parse_error(Tokens).
parse_DefinedSyntax(Tokens,Acc) ->
case Tokens of
@@ -1455,95 +1156,70 @@ parse_DefinedSyntax(Tokens,Acc) ->
%% Literal ::= word | ','
%% Setting ::= Type | Value | ValueSet | Object | ObjectSet
%% word equals typereference, but no lower cases
-parse_DefinedSyntaxToken([{',',L1}|Rest]) ->
- {{',',L1},Rest};
+parse_DefinedSyntaxToken([{',',_}=Comma|Rest]) ->
+ {Comma,Rest};
%% ObjectClassFieldType or a defined type with a constraint.
%% Should also be able to parse a parameterized type. It may be
%% impossible to distinguish between a parameterized type and a Literal
%% followed by an object set.
-parse_DefinedSyntaxToken(Tokens=[{typereference,L1,_Name},{T,_}|_Rest])
- when T == '.'; T == '(' ->
- case catch parse_Setting(Tokens) of
- {asn1_error,_} ->
- throw({asn1_error,{L1,get(asn1_module),
- [got,hd(Tokens), expected,['Word',setting]]}});
- {'EXIT',Reason} ->
- exit(Reason);
- Result ->
- Result
- end;
-parse_DefinedSyntaxToken(Tokens=[TRef={typereference,L1,Name}|Rest]) ->
+parse_DefinedSyntaxToken([{typereference,_,_Name},{T,_}|_]=Tokens)
+ when T =:= '.'; T =:= '(' ->
+ parse_Setting(Tokens);
+parse_DefinedSyntaxToken([{typereference,L1,Name}=TRef|Rest]=Tokens) ->
case is_word(Name) of
false ->
case lookahead_definedsyntax(Rest) of
word_or_setting ->
{{setting,L1,tref2Exttref(TRef)},Rest};
- _ ->
+ setting ->
parse_Setting(Tokens)
end;
true ->
- %% {{word_or_setting,L1,Name},Rest}
{{word_or_setting,L1,tref2Exttref(TRef)},Rest}
end;
parse_DefinedSyntaxToken(Tokens) ->
- case catch parse_Setting(Tokens) of
- {asn1_error,_} ->
- parse_Word(Tokens);
- {'EXIT',Reason} ->
- exit(Reason);
- Result ->
+ try parse_Setting(Tokens) of
+ {_,_}=Result ->
Result
+ catch
+ throw:{asn1_error,_} ->
+ parse_Word(Tokens)
end.
lookahead_definedsyntax([{typereference,_,Name}|_Rest]) ->
- case is_word(Name) of
+ case is_word(Name) of
true -> word_or_setting;
- _ -> setting
+ false -> setting
end;
lookahead_definedsyntax([{'}',_}|_Rest]) ->
word_or_setting;
lookahead_definedsyntax(_) ->
setting.
-parse_Word([{Name,Pos}|Rest]) ->
+parse_Word([{Name,Pos}|Rest]=Tokens) ->
case is_word(Name) of
false ->
- throw({asn1_error,{Pos,get(asn1_module),
- [got,Name, expected,a,'Word']}});
+ parse_error(Tokens);
true ->
{{word_or_setting,Pos,tref2Exttref(Pos,Name)},Rest}
- end.
+ end;
+parse_Word(Tokens) ->
+ parse_error(Tokens).
parse_Setting(Tokens) ->
Flist = [{type_tag,fun parse_Type/1},
{value_tag,fun parse_Value/1},
{object_tag,fun parse_Object/1},
{objectset_tag,fun parse_ObjectSet/1}],
- case (catch parse_or_tag(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result = {{value_tag,_},_} ->
+ case parse_or_tag(Tokens, Flist) of
+ {{value_tag,_},_}=Result ->
+ %% Keep the value_tag.
Result;
{{Tag,Setting},Rest} when is_atom(Tag) ->
+ %% Remove all other tags.
{Setting,Rest}
end.
-%% parse_Setting(Tokens) ->
-%% Flist = [fun parse_Type/1,
-%% fun parse_Value/1,
-%% fun parse_Object/1,
-%% fun parse_ObjectSet/1],
-%% case (catch parse_or(Tokens,Flist)) of
-%% {'EXIT',Reason} ->
-%% exit(Reason);
-%% AsnErr = {asn1_error,_} ->
-%% throw(AsnErr);
-%% Result ->
-%% Result
-%% end.
-
parse_DefinedObjectSet([{typereference,L1,ModuleName},{'.',_},
{typereference,L2,ObjSetName}|Rest]) ->
{{objectset,L1,#'Externaltypereference'{pos=L2,module=ModuleName,
@@ -1552,9 +1228,7 @@ parse_DefinedObjectSet([{typereference,L1,ObjSetName}|Rest]) ->
{{objectset,L1,#'Externaltypereference'{pos=L1,module=resolve_module(ObjSetName),
type=ObjSetName}},Rest};
parse_DefinedObjectSet(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- [typereference,'typereference.typereference']]}}).
+ parse_error(Tokens).
parse_ObjectSetAssignment([{typereference,L1,ObjSetName}|Rest]) ->
{Class,Rest2} = parse_DefinedObjectClass(Rest),
@@ -1564,16 +1238,9 @@ parse_ObjectSetAssignment([{typereference,L1,ObjSetName}|Rest]) ->
{#typedef{pos=L1,name=ObjSetName,
typespec=#'ObjectSet'{class=Class,
set=ObjectSet}},Rest4};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'::=']}})
-%%% Other ->
-%%% throw(Other)
- end;
-parse_ObjectSetAssignment(Tokens) ->
- throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- typereference]}}).
+ _ ->
+ parse_error(Rest2)
+ end.
%% parse_ObjectSet(Tokens) -> {Ret,Rest}
%% Tokens = [Tok]
@@ -1590,26 +1257,20 @@ parse_ObjectSet([{'{',_}|Rest]) ->
case Rest2 of
[{'}',_}|Rest3] ->
{ObjSetSpec,Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'}']}})
+ _ ->
+ parse_error(Rest2)
end;
parse_ObjectSet(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,'{']}}).
+ parse_error(Tokens).
-parse_ObjectSetSpec([{'...',_}|Rest]) ->
- case Rest of
- [{',',_}|Rest2] ->
- {Elements,Rest3}=parse_ElementSetSpecs(Rest2),
- {{[],Elements},Rest3};
- _ ->
- {['EXTENSIONMARK'],Rest}
- end;
+parse_ObjectSetSpec([{'...',_},{',',_}|Tokens0]) ->
+ {Elements,Tokens} = parse_ElementSetSpec(Tokens0),
+ {{element_set,empty,Elements},Tokens};
+parse_ObjectSetSpec([{'...',_}|Tokens]) ->
+ {{element_set,empty,empty},Tokens};
parse_ObjectSetSpec(Tokens) ->
parse_ElementSetSpecs(Tokens).
-% moved fun parse_Object/1 and fun parse_DefinedObjectSet/1 to parse_Elements
%% parse_ObjectSetElements(Tokens) -> {Result,Rest}
%% Result ::= {'ObjectSetFromObjects',Objects,Name} | {pos,ObjectSet,Params}
%% Objects ::= ReferencedObjects
@@ -1619,18 +1280,9 @@ parse_ObjectSetSpec(Tokens) ->
%% ObjectSet ::= {objectset,integer(),#'Externaltypereference'{}}
%% Params ::= list() (see parse_ActualParameterList/1)
parse_ObjectSetElements(Tokens) ->
- Flist = [%fun parse_Object/1,
- %fun parse_DefinedObjectSet/1,
- fun parse_ObjectSetFromObjects/1,
+ Flist = [fun parse_ObjectSetFromObjects/1,
fun parse_ParameterizedObjectSet/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
+ parse_or(Tokens, Flist).
parse_ObjectClassFieldType(Tokens) ->
{Class,Rest} = parse_DefinedObjectClass(Tokens),
@@ -1641,25 +1293,10 @@ parse_ObjectClassFieldType(Tokens) ->
classname=Class,
class=Class,fieldname=FieldName},
{#type{def=OCFT},Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'.']}})
-%%% Other ->
-%%% throw(Other)
+ _ ->
+ parse_error(Rest)
end.
-%parse_ObjectClassFieldValue(Tokens) ->
-% Flist = [fun parse_OpenTypeFieldVal/1,
-% fun parse_FixedTypeFieldVal/1],
-% case (catch parse_or(Tokens,Flist)) of
-% {'EXIT',Reason} ->
-% throw(Reason);
-% AsnErr = {asn1_error,_} ->
-% throw(AsnErr);
-% Result ->
-% Result
-% end.
-
parse_ObjectClassFieldValue(Tokens) ->
parse_OpenTypeFieldVal(Tokens).
@@ -1669,28 +1306,10 @@ parse_OpenTypeFieldVal(Tokens) ->
[{':',_}|Rest2] ->
{Value,Rest3} = parse_Value(Rest2),
{{opentypefieldvalue,Type,Value},Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,':']}})
+ _ ->
+ parse_error(Rest)
end.
-% parse_FixedTypeFieldVal(Tokens) ->
-% parse_Value(Tokens).
-
-% parse_InformationFromObjects(Tokens) ->
-% Flist = [fun parse_ValueFromObject/1,
-% fun parse_ValueSetFromObjects/1,
-% fun parse_TypeFromObject/1,
-% fun parse_ObjectFromObject/1],
-% case (catch parse_or(Tokens,Flist)) of
-% {'EXIT',Reason} ->
-% throw(Reason);
-% AsnErr = {asn1_error,_} ->
-% throw(AsnErr);
-% Result ->
-% Result
-% end.
-
%% parse_ReferencedObjects(Tokens) -> {Result,Rest}
%% Result ::= DefObject | DefObjSet |
%% {po,DefObject,Params} | {pos,DefObjSet,Params} |
@@ -1702,18 +1321,11 @@ parse_OpenTypeFieldVal(Tokens) ->
parse_ReferencedObjects(Tokens) ->
Flist = [fun parse_DefinedObject/1,
fun parse_DefinedObjectSet/1,
- fun parse_ParameterizedObject/1,
fun parse_ParameterizedObjectSet/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
+ parse_or(Tokens, Flist).
parse_ValueFromObject(Tokens) ->
+ %% This production also matches ObjectFromObject.
{Objects,Rest} = parse_ReferencedObjects(Tokens),
case Rest of
[{'.',_}|Rest2] ->
@@ -1722,35 +1334,10 @@ parse_ValueFromObject(Tokens) ->
{valuefieldreference,_} ->
{{'ValueFromObject',Objects,Name},Rest3};
_ ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,typefieldreference,expected,
- valuefieldreference]}})
+ parse_error(Rest2)
end;
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'.']}})
-%%% Other ->
-%%% throw({asn1_error,{got,Other,expected,'.'}})
- end.
-
-parse_ValueSetFromObjects(Tokens) ->
- {Objects,Rest} = parse_ReferencedObjects(Tokens),
- case Rest of
- [{'.',_}|Rest2] ->
- {Name,Rest3} = parse_FieldName(Rest2),
- case lists:last(Name) of
- {typefieldreference,_FieldName} ->
- {{'ValueSetFromObjects',Objects,Name},Rest3};
- _ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,
- typefieldreference]}})
- end;
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'.']}})
-%%% Other ->
-%%% throw({asn1_error,{got,Other,expected,'.'}})
+ _ ->
+ parse_error(Rest)
end.
parse_TypeFromObject(Tokens) ->
@@ -1760,28 +1347,12 @@ parse_TypeFromObject(Tokens) ->
{Name,Rest3} = parse_FieldName(Rest2),
case lists:last(Name) of
{typefieldreference,_FieldName} ->
- {{'TypeFromObject',Objects,Name},Rest3};
+ {#type{def={'TypeFromObject',Objects,Name}},Rest3};
_ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,
- typefieldreference]}})
+ parse_error(Rest2)
end;
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'.']}})
-%%% Other ->
-%%% throw({asn1_error,{got,Other,expected,'.'}})
- end.
-
-parse_ObjectFromObject(Tokens) ->
- {Objects,Rest} = parse_ReferencedObjects(Tokens),
- case Rest of
- [{'.',_}|Rest2] ->
- {Name,Rest3} = parse_FieldName(Rest2),
- {{'ObjectFromObject',Objects,Name},Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'.']}})
+ _ ->
+ parse_error(Rest)
end.
%% parse_ObjectSetFromObjects(Tokens) -> {Result,Rest}
@@ -1799,23 +1370,12 @@ parse_ObjectSetFromObjects(Tokens) ->
{typefieldreference,_FieldName} ->
{{'ObjectSetFromObjects',Objects,Name},Rest3};
_ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,
- typefieldreference]}})
+ parse_error(Rest2)
end;
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'.']}})
+ _ ->
+ parse_error(Rest)
end.
-% parse_InstanceOfType([{'INSTANCE',_},{'OF',_}|Rest]) ->
-% {Class,Rest2} = parse_DefinedObjectClass(Rest),
-% {{'InstanceOfType',Class},Rest2}.
-
-% parse_InstanceOfValue(Tokens) ->
-% parse_Value(Tokens).
-
-
%% X.682 constraint specification
@@ -1823,14 +1383,7 @@ parse_GeneralConstraint(Tokens) ->
Flist = [fun parse_UserDefinedConstraint/1,
fun parse_TableConstraint/1,
fun parse_ContentsConstraint/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
+ parse_or(Tokens, Flist).
parse_UserDefinedConstraint([{'CONSTRAINED',_},{'BY',_},{'{',_},{'}',_}|Rest])->
{{constrained_by,[]},Rest};
@@ -1841,32 +1394,23 @@ parse_UserDefinedConstraint([{'CONSTRAINED',_},
case Rest2 of
[{'}',_}|Rest3] ->
{{constrained_by,Param},Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'}']}})
+ _ ->
+ parse_error(Rest2)
end;
parse_UserDefinedConstraint(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- ['CONSTRAINED BY {}','CONSTRAINED BY {']]}}).
+ parse_error(Tokens).
parse_UserDefinedConstraintParameter(Tokens) ->
- parse_UserDefinedConstraintParameter(Tokens,[]).
-parse_UserDefinedConstraintParameter(Tokens,Acc) ->
+ parse_UserDefinedConstraintParameter(Tokens, []).
+
+parse_UserDefinedConstraintParameter(Tokens0, Acc) ->
Flist = [fun parse_GovernorAndActualParameter/1,
fun parse_ActualParameter/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- {Result,Rest} ->
- case Rest of
- [{',',_}|_Rest2] ->
- parse_UserDefinedConstraintParameter(Tokens,[Result|Acc]);
- _ ->
- {lists:reverse([Result|Acc]),Rest}
- end
+ case parse_or(Tokens0, Flist) of
+ {Result,[{',',_}|Tokens]} ->
+ parse_UserDefinedConstraintParameter(Tokens, [Result|Acc]);
+ {Result,Tokens} ->
+ {lists:reverse(Acc, [Result]),Tokens}
end.
parse_GovernorAndActualParameter(Tokens) ->
@@ -1875,26 +1419,18 @@ parse_GovernorAndActualParameter(Tokens) ->
[{':',_}|Rest2] ->
{Params,Rest3} = parse_ActualParameter(Rest2),
{{'Governor_Params',Governor,Params},Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,':']}})
+ _ ->
+ parse_error(Rest)
end.
parse_TableConstraint(Tokens) ->
Flist = [fun parse_ComponentRelationConstraint/1,
fun parse_SimpleTableConstraint/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
+ parse_or(Tokens, Flist).
parse_SimpleTableConstraint(Tokens) ->
{ObjectSet,Rest} = parse_ObjectSet(Tokens),
- {{simpletable,ObjectSet},Rest}.
+ {{element_set,{simpletable,ObjectSet},none},Rest}.
parse_ComponentRelationConstraint([{'{',_}|Rest]) ->
{ObjectSet,Rest2} = parse_DefinedObjectSet(Rest),
@@ -1903,21 +1439,18 @@ parse_ComponentRelationConstraint([{'{',_}|Rest]) ->
{AtNot,Rest4} = parse_AtNotationList(Rest3,[]),
case Rest4 of
[{'}',_}|Rest5] ->
- {{componentrelation,ObjectSet,AtNot},Rest5};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'}']}})
+ Ret = {element_set,
+ {componentrelation,ObjectSet,AtNot},
+ none},
+ {Ret,Rest5};
+ _ ->
+ parse_error(Rest4)
end;
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,
- 'ComponentRelationConstraint',ended,with,'}']}})
-%%% Other ->
-%%% throw(Other)
+ _ ->
+ parse_error(Rest2)
end;
parse_ComponentRelationConstraint(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,'{']}}).
+ parse_error(Tokens).
parse_AtNotationList(Tokens,Acc) ->
{AtNot,Rest} = parse_AtNotation(Tokens),
@@ -1925,7 +1458,7 @@ parse_AtNotationList(Tokens,Acc) ->
[{',',_}|Rest2] ->
parse_AtNotationList(Rest2,[AtNot|Acc]);
_ ->
- {lists:reverse([AtNot|Acc]),Rest}
+ {lists:reverse(Acc, [AtNot]),Rest}
end.
parse_AtNotation([{'@',_},{'.',_}|Rest]) ->
@@ -1935,20 +1468,17 @@ parse_AtNotation([{'@',_}|Rest]) ->
{CIdList,Rest2} = parse_ComponentIdList(Rest),
{{outermost,CIdList},Rest2};
parse_AtNotation(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,['@','@.']]}}).
+ parse_error(Tokens).
parse_ComponentIdList(Tokens) ->
parse_ComponentIdList(Tokens,[]).
-parse_ComponentIdList([Id = {identifier,_,_},{'.',_}|Rest],Acc) ->
+parse_ComponentIdList([#identifier{}=Id,{'.',_}|Rest], Acc) ->
parse_ComponentIdList(Rest,[identifier2Extvalueref(Id)|Acc]);
-parse_ComponentIdList([Id = {identifier,_,_}|Rest],Acc) ->
- {lists:reverse([identifier2Extvalueref(Id)|Acc]),Rest};
+parse_ComponentIdList([#identifier{}=Id|Rest], Acc) ->
+ {lists:reverse(Acc, [identifier2Extvalueref(Id)]),Rest};
parse_ComponentIdList(Tokens,_) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- [identifier,'identifier.']]}}).
+ parse_error(Tokens).
parse_ContentsConstraint([{'CONTAINING',_}|Rest]) ->
{Type,Rest2} = parse_Type(Rest),
@@ -1963,24 +1493,14 @@ parse_ContentsConstraint([{'ENCODED',_},{'BY',_}|Rest]) ->
{Value,Rest2} = parse_Value(Rest),
{{contentsconstraint,[],Value},Rest2};
parse_ContentsConstraint(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- 'CONTAINING','or','ENCODED BY']}}).
-
+ parse_error(Tokens).
% X.683 Parameterization of ASN.1 specifications
parse_Governor(Tokens) ->
Flist = [fun parse_Type/1,
fun parse_DefinedObjectClass/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
+ parse_or(Tokens, Flist).
parse_ActualParameter(Tokens) ->
Flist = [fun parse_Type/1,
@@ -1989,32 +1509,7 @@ parse_ActualParameter(Tokens) ->
fun parse_DefinedObjectClass/1,
fun parse_Object/1,
fun parse_ObjectSet/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
-
-parse_ParameterizedAssignment(Tokens) ->
- Flist = [fun parse_ParameterizedTypeAssignment/1,
- fun parse_ParameterizedValueAssignment/1,
- fun parse_ParameterizedValueSetTypeAssignment/1,
- fun parse_ParameterizedObjectClassAssignment/1,
- fun parse_ParameterizedObjectAssignment/1,
- fun parse_ParameterizedObjectSetAssignment/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- AsnAssErr = {asn1_assignment_error,_} ->
- throw(AsnAssErr);
- Result ->
- Result
- end.
+ parse_or(Tokens, Flist).
%% parse_ParameterizedTypeAssignment(Tokens) -> Result
%% Result = {#ptypedef{},Rest} | throw()
@@ -2025,18 +1520,13 @@ parse_ParameterizedTypeAssignment([{typereference,L1,Name}|Rest]) ->
{Type,Rest4} = parse_Type(Rest3),
{#ptypedef{pos=L1,name=Name,args=ParameterList,typespec=Type},
Rest4};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'::=']}})
- end;
-parse_ParameterizedTypeAssignment(Tokens) ->
- throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- typereference]}}).
+ _ ->
+ parse_error(Rest2)
+ end.
%% parse_ParameterizedValueAssignment(Tokens) -> Result
%% Result = {#pvaluedef{},Rest} | throw()
-parse_ParameterizedValueAssignment([{identifier,L1,Name}|Rest]) ->
+parse_ParameterizedValueAssignment([#identifier{pos=L1,val=Name}|Rest]) ->
{ParameterList,Rest2} = parse_ParameterList(Rest),
{Type,Rest3} = parse_Type(Rest2),
case Rest3 of
@@ -2044,13 +1534,9 @@ parse_ParameterizedValueAssignment([{identifier,L1,Name}|Rest]) ->
{Value,Rest5} = parse_Value(Rest4),
{#pvaluedef{pos=L1,name=Name,args=ParameterList,type=Type,
value=Value},Rest5};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'::=']}})
- end;
-parse_ParameterizedValueAssignment(Tokens) ->
- throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,identifier]}}).
+ _ ->
+ parse_error(Rest3)
+ end.
%% parse_ParameterizedValueSetTypeAssignment(Tokens) -> Result
%% Result = {#pvaluesetdef{},Rest} | throw()
@@ -2062,14 +1548,9 @@ parse_ParameterizedValueSetTypeAssignment([{typereference,L1,Name}|Rest]) ->
{ValueSet,Rest5} = parse_ValueSet(Rest4),
{#pvaluesetdef{pos=L1,name=Name,args=ParameterList,
type=Type,valueset=ValueSet},Rest5};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'::=']}})
- end;
-parse_ParameterizedValueSetTypeAssignment(Tokens) ->
- throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- typereference]}}).
+ _ ->
+ parse_error(Rest3)
+ end.
%% parse_ParameterizedObjectClassAssignment(Tokens) -> Result
%% Result = {#ptypedef{},Rest} | throw()
@@ -2080,18 +1561,13 @@ parse_ParameterizedObjectClassAssignment([{typereference,L1,Name}|Rest]) ->
{Class,Rest4} = parse_ObjectClass(Rest3),
{#ptypedef{pos=L1,name=Name,args=ParameterList,typespec=Class},
Rest4};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'::=']}})
- end;
-parse_ParameterizedObjectClassAssignment(Tokens) ->
- throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- typereference]}}).
+ _ ->
+ parse_error(Rest2)
+ end.
%% parse_ParameterizedObjectAssignment(Tokens) -> Result
%% Result = {#pobjectdef{},Rest} | throw()
-parse_ParameterizedObjectAssignment([{identifier,L1,Name}|Rest]) ->
+parse_ParameterizedObjectAssignment([#identifier{pos=L1,val=Name}|Rest]) ->
{ParameterList,Rest2} = parse_ParameterList(Rest),
{Class,Rest3} = parse_DefinedObjectClass(Rest2),
case Rest3 of
@@ -2099,36 +1575,9 @@ parse_ParameterizedObjectAssignment([{identifier,L1,Name}|Rest]) ->
{Object,Rest5} = parse_Object(Rest4),
{#pobjectdef{pos=L1,name=Name,args=ParameterList,
class=Class,def=Object},Rest5};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'::=']}})
-%%% Other ->
-%%% throw(Other)
- end;
-parse_ParameterizedObjectAssignment(Tokens) ->
- throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,identifier]}}).
-
-%% parse_ParameterizedObjectSetAssignment(Tokens) -> Result
-%% Result = {#pobjectsetdef{},Rest} | throw{}
-parse_ParameterizedObjectSetAssignment([{typereference,L1,Name}|Rest]) ->
- {ParameterList,Rest2} = parse_ParameterList(Rest),
- {Class,Rest3} = parse_DefinedObjectClass(Rest2),
- case Rest3 of
- [{'::=',_}|Rest4] ->
- {ObjectSet,Rest5} = parse_ObjectSet(Rest4),
- {#pobjectsetdef{pos=L1,name=Name,args=ParameterList,
- class=Class,def=ObjectSet},Rest5};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'::=']}})
-%%% Other ->
-%%% throw(Other)
- end;
-parse_ParameterizedObjectSetAssignment(Tokens) ->
- throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- typereference]}}).
+ _ ->
+ parse_error(Rest3)
+ end.
%% parse_ParameterList(Tokens) -> Result
%% Result = [Parameter]
@@ -2137,35 +1586,24 @@ parse_ParameterizedObjectSetAssignment(Tokens) ->
%% Type = #type{}
%% DefinedObjectClass = #'Externaltypereference'{}
%% Reference = #'Externaltypereference'{} | #'Externalvaluereference'{}
-parse_ParameterList([{'{',_}|Rest]) ->
- parse_ParameterList(Rest,[]);
-parse_ParameterList(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,'{']}}).
+parse_ParameterList([{'{',_}|Tokens]) ->
+ parse_ParameterList(Tokens, []).
parse_ParameterList(Tokens,Acc) ->
{Parameter,Rest} = parse_Parameter(Tokens),
case Rest of
[{',',_}|Rest2] ->
- parse_ParameterList(Rest2,[Parameter|Acc]);
+ parse_ParameterList(Rest2, [Parameter|Acc]);
[{'}',_}|Rest3] ->
- {lists:reverse([Parameter|Acc]),Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,[',','}']]}})
+ {lists:reverse(Acc, [Parameter]),Rest3};
+ _ ->
+ parse_error(Rest)
end.
parse_Parameter(Tokens) ->
Flist = [fun parse_ParamGovAndRef/1,
fun parse_Reference/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
+ parse_or(Tokens, Flist).
parse_ParamGovAndRef(Tokens) ->
{ParamGov,Rest} = parse_ParamGovernor(Tokens),
@@ -2173,86 +1611,54 @@ parse_ParamGovAndRef(Tokens) ->
[{':',_}|Rest2] ->
{Ref,Rest3} = parse_Reference(Rest2),
{{ParamGov,Ref},Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,':']}})
+ _ ->
+ parse_error(Rest)
end.
parse_ParamGovernor(Tokens) ->
Flist = [fun parse_Governor/1,
fun parse_Reference/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
-
-% parse_ParameterizedReference(Tokens) ->
-% {Ref,Rest} = parse_Reference(Tokens),
-% case Rest of
-% [{'{',_},{'}',_}|Rest2] ->
-% {{ptref,Ref},Rest2};
-% _ ->
-% {{ptref,Ref},Rest}
-% end.
+ parse_or(Tokens, Flist).
parse_SimpleDefinedType([{typereference,L1,ModuleName},{'.',_},
{typereference,_,TypeName}|Rest]) ->
{#'Externaltypereference'{pos=L1,module=ModuleName,
type=TypeName},Rest};
parse_SimpleDefinedType([Tref={typereference,_,_}|Rest]) ->
-% {#'Externaltypereference'{pos=L2,module=get(asn1_module),
-% type=TypeName},Rest};
{tref2Exttref(Tref),Rest};
parse_SimpleDefinedType(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- [typereference,'typereference.typereference']]}}).
+ parse_error(Tokens).
parse_SimpleDefinedValue([{typereference,L1,ModuleName},{'.',_},
- {identifier,_,Value}|Rest]) ->
+ #identifier{val=Value}|Rest]) ->
{{simpledefinedvalue,#'Externalvaluereference'{pos=L1,module=ModuleName,
value=Value}},Rest};
-parse_SimpleDefinedValue([Id={identifier,_,_Value}|Rest]) ->
+parse_SimpleDefinedValue([#identifier{}=Id|Rest]) ->
{{simpledefinedvalue,identifier2Extvalueref(Id)},Rest};
parse_SimpleDefinedValue(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- ['typereference.identifier',identifier]]}}).
+ parse_error(Tokens).
parse_ParameterizedType(Tokens) ->
+ %% May also be a parameterized class.
{Type,Rest} = parse_SimpleDefinedType(Tokens),
{Params,Rest2} = parse_ActualParameterList(Rest),
- {{pt,Type,Params},Rest2}.
+ {#type{def={pt,Type,Params}},Rest2}.
parse_ParameterizedValue(Tokens) ->
+ %% May also be a parameterized object.
{Value,Rest} = parse_SimpleDefinedValue(Tokens),
{Params,Rest2} = parse_ActualParameterList(Rest),
{{pv,Value,Params},Rest2}.
-parse_ParameterizedObjectClass(Tokens) ->
- {Type,Rest} = parse_DefinedObjectClass(Tokens),
- {Params,Rest2} = parse_ActualParameterList(Rest),
- {{poc,Type,Params},Rest2}.
-
parse_ParameterizedObjectSet(Tokens) ->
{ObjectSet,Rest} = parse_DefinedObjectSet(Tokens),
{Params,Rest2} = parse_ActualParameterList(Rest),
{{pos,ObjectSet,Params},Rest2}.
-parse_ParameterizedObject(Tokens) ->
- {Object,Rest} = parse_DefinedObject(Tokens),
- {Params,Rest2} = parse_ActualParameterList(Rest),
- {{po,Object,Params},Rest2}.
-
parse_ActualParameterList([{'{',_}|Rest]) ->
parse_ActualParameterList(Rest,[]);
parse_ActualParameterList(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,'{']}}).
+ parse_error(Tokens).
parse_ActualParameterList(Tokens,Acc) ->
{Parameter,Rest} = parse_ActualParameter(Tokens),
@@ -2260,43 +1666,22 @@ parse_ActualParameterList(Tokens,Acc) ->
[{',',_}|Rest2] ->
parse_ActualParameterList(Rest2,[Parameter|Acc]);
[{'}',_}|Rest3] ->
- {lists:reverse([Parameter|Acc]),Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,[',','}']]}})
-%%% Other ->
-%%% throw(Other)
+ {lists:reverse(Acc, [Parameter]),Rest3};
+ _ ->
+ parse_error(Rest)
end.
-
-
-
-
-
-
-%-------------------------
-
+%% Test whether Token is allowed in a syntax list.
is_word(Token) ->
- case not_allowed_word(Token) of
+ List = atom_to_list(Token),
+ case not_allowed_word(List) of
true -> false;
- _ ->
- if
- is_atom(Token) ->
- Item = atom_to_list(Token),
- is_word(Item);
- is_list(Token), length(Token) == 1 ->
- check_one_char_word(Token);
- is_list(Token) ->
- [A|Rest] = Token,
- case check_first(A) of
- true ->
- check_rest(Rest);
- _ ->
- false
- end
- end
+ false -> is_word_1(List)
end.
+is_word_1([H|T]) ->
+ check_first(H) andalso check_rest(T).
+
not_allowed_word(Name) ->
lists:member(Name,["BIT",
"BOOLEAN",
@@ -2321,257 +1706,123 @@ not_allowed_word(Name) ->
"TRUE",
"UNION"]).
-check_one_char_word([A]) when $A =< A, $Z >= A ->
- true;
-check_one_char_word([_]) ->
- false. %% unknown item in SyntaxList
+check_first(C) ->
+ $A =< C andalso C =< $Z.
-check_first(A) when $A =< A, $Z >= A ->
- true;
-check_first(_) ->
- false. %% unknown item in SyntaxList
-
-check_rest([R,R|_Rs]) when $- == R ->
- false; %% two consecutive hyphens are not allowed in a word
-check_rest([R]) when $- == R ->
- false; %% word cannot end with hyphen
-check_rest([R|Rs]) when $A=<R, $Z>=R; $-==R ->
+check_rest([R|Rs]) when $A =< R, R =< $Z; R =:= $- ->
check_rest(Rs);
check_rest([]) ->
true;
check_rest(_) ->
false.
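+%% For example (assuming the atom is not in the reserved-word list
+%% above), is_word('DEFINED-BY') returns true, while is_word('TRUE')
+%% and is_word('lowercase') return false.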
+%%%
+%%% Parse alternative type lists for CHOICE.
+%%%
+
+parse_AlternativeTypeLists(Tokens0) ->
+ {Root,Tokens1} = parse_AlternativeTypeList(Tokens0),
+ case Tokens1 of
+ [{',',_}|Tokens2] ->
+ {ExtMarker,Tokens3} = parse_ExtensionAndException(Tokens2),
+ {ExtAlts,Tokens4} = parse_ExtensionAdditionAlternatives(Tokens3),
+ {_,Tokens} = parse_OptionalExtensionMarker(Tokens4, []),
+ {Root++ExtMarker++ExtAlts,Tokens};
+ Tokens ->
+ {Root,Tokens}
+ end.
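+%% For example, the alternatives "a INTEGER, ..., b BOOLEAN" give,
+%% roughly, [#'ComponentType'{name=a},#'EXTENSIONMARK'{},
+%% #'ComponentType'{name=b}] (other record fields omitted).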
+
+parse_ExtensionAndException([{'...',L}|Tokens0]) ->
+ {[#'EXTENSIONMARK'{pos=L}],
+ case Tokens0 of
+ [{'!',_}|Tokens1] ->
+ {_,Tokens} = parse_ExceptionIdentification(Tokens1),
+ Tokens;
+ _ ->
+ Tokens0
+ end}.
+
+parse_AlternativeTypeList([#identifier{}|_]=Tokens0) ->
+ {AltType,Tokens} = parse_NamedType(Tokens0),
+ parse_AlternativeTypeList_1(Tokens, [AltType]);
+parse_AlternativeTypeList(Tokens) ->
+ parse_error(Tokens).
+
+parse_AlternativeTypeList_1([{',',_}|[#identifier{}|_]=Tokens0], Acc) ->
+ {AltType,Tokens} = parse_NamedType(Tokens0),
+ parse_AlternativeTypeList_1(Tokens, [AltType|Acc]);
+parse_AlternativeTypeList_1(Tokens, Acc) ->
+ {lists:reverse(Acc),Tokens}.
-to_set(V) when is_list(V) ->
- ordsets:from_list(V);
-to_set(V) ->
- ordsets:from_list([V]).
-
-parse_AlternativeTypeLists(Tokens) ->
- parse_AlternativeTypeLists(Tokens,[]).
-
-parse_AlternativeTypeLists(Tokens = [{identifier,_,_}|_Rest0],Clist) ->
- {CompList,Rest1} = parse_AlternativeTypeList(Tokens,[]),
- parse_AlternativeTypeLists(Rest1,Clist++CompList);
-parse_AlternativeTypeLists([{'...',L1},{'!',_}|Rest02],Clist0) ->
- {_,Rest03} = parse_ExceptionIdentification(Rest02),
- %% Exception info is currently thrown away
- parse_AlternativeTypeLists2(Rest03,Clist0++[#'EXTENSIONMARK'{pos=L1}]);
-parse_AlternativeTypeLists([{',',L1},{'...',_},{'!',_}|Rest02],Clist0) when Clist0 =/= []->
- {_,Rest03} = parse_ExceptionIdentification(Rest02),
- %% Exception info is currently thrown away
- parse_AlternativeTypeLists2(Rest03,Clist0++[#'EXTENSIONMARK'{pos=L1}]);
-
-parse_AlternativeTypeLists([{',',_},{'...',L1}|Rest02],Clist0) when Clist0 =/= []->
- parse_AlternativeTypeLists2(Rest02,Clist0++[#'EXTENSIONMARK'{pos=L1}]);
-parse_AlternativeTypeLists([{'...',L1}|Rest02],Clist0) ->
- parse_AlternativeTypeLists2(Rest02,Clist0++[#'EXTENSIONMARK'{pos=L1}]);
-parse_AlternativeTypeLists(Tokens = [{'}',_L1}|_Rest02],Clist0) ->
- {Clist0,Tokens}.
-
-parse_AlternativeTypeLists2(Tokens,Clist) ->
- {ExtAdd,Rest} = parse_ExtensionAdditionAlternatives(Tokens,Clist),
- {Clist2,Rest2} = parse_OptionalExtensionMarker(Rest,lists:flatten(ExtAdd)),
- case Rest2 of
- [{',',_}|Rest3] ->
- {CompList,Rest4} = parse_AlternativeTypeList(Rest3,[]),
- {Clist2 ++ CompList,Rest4};
- _ ->
- {Clist2,Rest2}
- end.
-
-
-
-parse_AlternativeTypeList([{',',_},Id = {identifier,_,_}|Rest],Acc) when Acc =/= [] ->
- {AlternativeType,Rest2} = parse_NamedType([Id|Rest]),
- parse_AlternativeTypeList(Rest2,[AlternativeType|Acc]);
-parse_AlternativeTypeList(Tokens = [{'}',_}|_],Acc) ->
- {lists:reverse(Acc),Tokens};
-parse_AlternativeTypeList(Tokens = [{']',_},{']',_}|_],Acc) ->
- {lists:reverse(Acc),Tokens};
-parse_AlternativeTypeList(Tokens = [{',',_},{'...',_}|_],Acc) ->
- {lists:reverse(Acc),Tokens};
-parse_AlternativeTypeList(Tokens,[]) ->
- {AlternativeType,Rest} = parse_NamedType(Tokens),
- parse_AlternativeTypeList(Rest,[AlternativeType]);
-parse_AlternativeTypeList(Tokens,_) ->
- throw({asn1_error,
- {get_line(hd(Tokens)),get(asn1_module),
- [got,[get_token(hd(Tokens)),get_token(hd(tl(Tokens)))],
- expected,['}',', identifier']]}}).
-
-parse_ExtensionAdditionAlternatives(Tokens =[{',',_}|_],Clist) ->
- {ExtAddList,Rest2} = parse_ExtensionAdditionAlternativesList(Tokens,[]),
- {Clist++lists:flatten(ExtAddList),Rest2};
-parse_ExtensionAdditionAlternatives(Tokens,Clist) ->
- %% Empty
- {Clist,Tokens}.
+parse_ExtensionAdditionAlternatives([{',',_}|_]=Tokens0) ->
+ parse_ExtensionAdditionAlternativesList(Tokens0, []);
+parse_ExtensionAdditionAlternatives(Tokens) ->
+ {[],Tokens}.
-parse_ExtensionAdditionAlternativesList([{',',_},Id = {identifier,_,_}|Rest],Acc) ->
- {AlternativeType,Rest2} = parse_NamedType([Id|Rest]),
- parse_ExtensionAdditionAlternativesList(Rest2,[AlternativeType|Acc]);
-parse_ExtensionAdditionAlternativesList([{',',_},C1 = {'[',_},C2 = {'[',_}|Rest],Acc) ->
- {ExtAddGroup,Rest2} = parse_ExtensionAdditionAlternativesGroup([C1,C2|Rest],[]),
- parse_ExtensionAdditionAlternativesList(Rest2,[ExtAddGroup|Acc]);
-parse_ExtensionAdditionAlternativesList(Tokens = [{'}',_}|_],Acc) ->
- {lists:reverse(Acc),Tokens};
-parse_ExtensionAdditionAlternativesList(Tokens = [{',',_},{'...',_}|_],Acc) ->
- {lists:reverse(Acc),Tokens};
-parse_ExtensionAdditionAlternativesList(Tokens,_) ->
- throw({asn1_error,
- {get_line(hd(Tokens)),get(asn1_module),
- [got,[get_token(hd(Tokens)),get_token(hd(tl(Tokens)))],
- expected,['}',', identifier']]}}).
-
-
-parse_ExtensionAdditionAlternativesGroup([ {'[',_},{'[',_},_VsnNr = {number,_,Num},{':',_}|Rest],[]) ->
- parse_ExtensionAdditionAlternativesGroup2(Rest,Num);
-parse_ExtensionAdditionAlternativesGroup([ {'[',_},{'[',_}|Rest],[]) ->
- parse_ExtensionAdditionAlternativesGroup2(Rest,undefined);
-parse_ExtensionAdditionAlternativesGroup(Tokens,_) ->
- throw({asn1_error,
- {get_line(hd(Tokens)),get(asn1_module),
- [got,[get_token(hd(Tokens)),get_token(hd(tl(Tokens)))],
- expected,['[[']]}}).
-
-
-parse_ExtensionAdditionAlternativesGroup2(Tokens,Num) ->
- {CompTypeList,Rest} = parse_AlternativeTypeList(Tokens,[]),
- case Rest of
- [{']',_},{']',_}|Rest2] ->
- {[{'ExtensionAdditionGroup',Num}|CompTypeList] ++
- ['ExtensionAdditionGroupEnd'],Rest2};
+parse_ExtensionAdditionAlternativesList([{',',_}|Tokens1]=Tokens0, Acc) ->
+ try parse_ExtensionAdditionAlternative(Tokens1) of
+ {ExtAddAlt,Tokens2} ->
+ parse_ExtensionAdditionAlternativesList(Tokens2, [ExtAddAlt|Acc])
+ catch
+ throw:{asn1_error,_} ->
+ {lists:append(lists:reverse(Acc)),Tokens0}
+ end;
+parse_ExtensionAdditionAlternativesList(Tokens, Acc) ->
+ {lists:append(lists:reverse(Acc)),Tokens}.
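+%% Note that when the token following a comma does not start a valid
+%% extension addition alternative, the comma is left in the returned
+%% token list so that the caller can handle a terminating '...' or '}'.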
+
+parse_ExtensionAdditionAlternative([#identifier{}|_]=Tokens0) ->
+ {NamedType,Tokens} = parse_NamedType(Tokens0),
+ {[NamedType],Tokens};
+parse_ExtensionAdditionAlternative([{'[',_},{'[',_}|Tokens0]) ->
+ Tokens2 = case Tokens0 of
+ [{number,_,_},{':',_}|Tokens1] -> Tokens1;
+ _ -> Tokens0
+ end,
+ {GroupList,Tokens3} = parse_AlternativeTypeList(Tokens2),
+ case Tokens3 of
+ [{']',_},{']',_}|Tokens] ->
+ {GroupList,Tokens};
_ ->
- throw({asn1_error,{get_line(hd(Rest)),get(asn1_module),
- [got,get_token(hd(Rest)),expected,[']]']]}})
- end.
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-%% parse_AlternativeTypeLists(Tokens,ExtensionDefault) ->
-%% {AltTypeList,Rest1} = parse_AlternativeTypeList(Tokens),
-%% {ExtensionAndException,Rest2} =
-%% case Rest1 of
-%% [{',',_},{'...',L1},{'!',_}|Rest12] ->
-%% {_,Rest13} = parse_ExceptionIdentification(Rest12),
-%% %% Exception info is currently thrown away
-%% {[#'EXTENSIONMARK'{pos=L1}],Rest13};
-%% [{',',_},{'...',L1}|Rest12] ->
-%% {[#'EXTENSIONMARK'{pos=L1}],Rest12};
-%% _ ->
-%% {[],Rest1}
-%% end,
-%% {AltTypeList2,Rest5} =
-%% case ExtensionAndException of
-%% [] ->
-%% {AltTypeList,Rest2};
-%% _ ->
-%% {ExtensionAddition,Rest3} =
-%% case Rest2 of
-%% [{',',_}|Rest23] ->
-%% parse_ExtensionAdditionAlternativeList(Rest23);
-%% _ ->
-%% {[],Rest2}
-%% end,
-%% {OptionalExtensionMarker,Rest4} =
-%% case Rest3 of
-%% [{',',_},{'...',L3}|Rest31] ->
-%% {[#'EXTENSIONMARK'{pos=L3}],Rest31};
-%% _ ->
-%% {[],Rest3}
-%% end,
-%% {AltTypeList ++ ExtensionAndException ++
-%% ExtensionAddition ++ OptionalExtensionMarker, Rest4}
-%% end,
-%% AltTypeList3 =
-%% case [X || X=#'EXTENSIONMARK'{} <- AltTypeList2] of
-%% [] when ExtensionDefault == 'IMPLIED' ->
-%% AltTypeList2 ++ [#'EXTENSIONMARK'{}];
-%% _ ->
-%% AltTypeList2
-%% end,
-%% {AltTypeList3,Rest5}.
-
-
-%% parse_AlternativeTypeList(Tokens) ->
-%% parse_AlternativeTypeList(Tokens,[]).
+ parse_error(Tokens3)
+ end;
+parse_ExtensionAdditionAlternative(Tokens) ->
+ parse_error(Tokens).
-%% parse_AlternativeTypeList(Tokens,Acc) ->
-%% {NamedType,Rest} = parse_NamedType(Tokens),
-%% case Rest of
-%% [{',',_},Id = {identifier,_,_}|Rest2] ->
-%% parse_AlternativeTypeList([Id|Rest2],[NamedType|Acc]);
-%% _ ->
-%% {lists:reverse([NamedType|Acc]),Rest}
-%% end.
+%%%
+%%% End of parsing of alternative type lists.
+%%%
-
-
-%% parse_ExtensionAdditionAlternativeList(Tokens) ->
-%% parse_ExtensionAdditionAlternativeList(Tokens,[]).
-
-%% parse_ExtensionAdditionAlternativeList([{'[[',_}|Rest],Acc) ->
-%% parse_ExtensionAdditionAlternativeList(Rest,Acc);
-%% parse_ExtensionAdditionAlternativeList(Tokens = [{identifier,_,_}|_Rest],Acc) ->
-%% {Element,Rest0} = parse_NamedType(Tokens);
-%% case Rest0 of
-%% [{',',_}|Rest01] ->
-%% parse_ExtensionAdditionAlternativeList(Rest01,[Element|Acc]);
-%% _ ->
-%% {lists:reverse([Element|Acc]),Rest0}
-%% end.
-
-%% parse_ExtensionAdditionAlternatives([{'[[',_}|Rest]) ->
-%% parse_ExtensionAdditionAlternatives(Rest,[]);
-%% parse_ExtensionAdditionAlternatives(Tokens) ->
-%% throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
-%% [got,get_token(hd(Tokens)),expected,'[[']}}).
-
-%% parse_ExtensionAdditionAlternatives([Id = {identifier,_,_}|Rest],Acc) ->
-%% {NamedType, Rest2} = parse_NamedType([Id|Rest]),
-%% case Rest2 of
-%% [{',',_}|Rest21] ->
-%% parse_ExtensionAdditionAlternatives(Rest21,[NamedType|Acc]);
-%% [{']]',_}|Rest21] ->
-%% {lists:reverse(Acc),Rest21};
-%% _ ->
-%% throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
-%% [got,get_token(hd(Rest2)),expected,[',',']]']]}})
-%% end.
-
-parse_NamedType([{identifier,L1,Idname}|Rest]) ->
+parse_NamedType([#identifier{pos=L1,val=Idname}|Rest]) ->
{Type,Rest2} = parse_Type(Rest),
{#'ComponentType'{pos=L1,name=Idname,typespec=Type,prop=mandatory},Rest2};
parse_NamedType(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,identifier]}}).
+ parse_error(Tokens).
+%%%
+%%% Parse component type lists for SEQUENCE and SET.
+%%%
parse_ComponentTypeLists(Tokens) ->
- parse_ComponentTypeLists(Tokens,[]).
+ parse_ComponentTypeLists(Tokens, []).
-parse_ComponentTypeLists(Tokens = [{identifier,_,_}|_Rest0],Clist) ->
- {CompList,Rest1} = parse_ComponentTypeList(Tokens,[]),
- parse_ComponentTypeLists(Rest1,Clist++CompList);
-parse_ComponentTypeLists(Tokens = [{'COMPONENTS',_},{'OF',_}|_Rest],Clist) ->
+parse_ComponentTypeLists([#identifier{}|_Rest0]=Tokens, Clist) ->
{CompList,Rest1} = parse_ComponentTypeList(Tokens,[]),
parse_ComponentTypeLists(Rest1,Clist++CompList);
-parse_ComponentTypeLists([{'...',L1},{'!',_}|Rest02],Clist0) ->
- {_,Rest03} = parse_ExceptionIdentification(Rest02),
- %% Exception info is currently thrown away
- parse_ComponentTypeLists2(Rest03,Clist0++[#'EXTENSIONMARK'{pos=L1}]);
+parse_ComponentTypeLists([{'COMPONENTS',_},{'OF',_}|_]=Tokens,Clist) ->
+ {CompList,Rest1} = parse_ComponentTypeList(Tokens, []),
+ parse_ComponentTypeLists(Rest1, Clist++CompList);
parse_ComponentTypeLists([{',',L1},{'...',_},{'!',_}|Rest02],Clist0) when Clist0 =/= []->
{_,Rest03} = parse_ExceptionIdentification(Rest02),
%% Exception info is currently thrown away
parse_ComponentTypeLists2(Rest03,Clist0++[#'EXTENSIONMARK'{pos=L1}]);
-
- parse_ComponentTypeLists([{',',_},{'...',L1}|Rest02],Clist0) when Clist0 =/= []->
+parse_ComponentTypeLists([{',',_},{'...',L1}|Rest02],Clist0) when Clist0 =/= []->
parse_ComponentTypeLists2(Rest02,Clist0++[#'EXTENSIONMARK'{pos=L1}]);
parse_ComponentTypeLists([{'...',L1}|Rest02],Clist0) ->
parse_ComponentTypeLists2(Rest02,Clist0++[#'EXTENSIONMARK'{pos=L1}]);
parse_ComponentTypeLists(Tokens = [{'}',_L1}|_Rest02],Clist0) ->
- {Clist0,Tokens}.
+ {Clist0,Tokens};
+parse_ComponentTypeLists(Tokens, _) ->
+ parse_error(Tokens).
parse_ComponentTypeLists2(Tokens,Clist) ->
{ExtAdd,Rest} = parse_ExtensionAdditions(Tokens,Clist),
@@ -2590,12 +1841,12 @@ parse_OptionalExtensionMarker(Tokens,Clist) ->
{Clist,Tokens}.
-parse_ComponentTypeList([{',',_},Id = {identifier,_,_}|Rest],Acc) when Acc =/= [] ->
- {ComponentType,Rest2} = parse_ComponentType([Id|Rest]),
- parse_ComponentTypeList(Rest2,[ComponentType|Acc]);
-parse_ComponentTypeList([{',',_},C1={'COMPONENTS',_},C2={'OF',_}|Rest],Acc) when Acc =/= [] ->
- {ComponentType,Rest2} = parse_ComponentType([C1,C2|Rest]),
- parse_ComponentTypeList(Rest2,[ComponentType|Acc]);
+parse_ComponentTypeList([{',',_}|[#identifier{}|_]=Tokens0], Acc) when Acc =/= [] ->
+ {ComponentType,Tokens} = parse_ComponentType(Tokens0),
+ parse_ComponentTypeList(Tokens, [ComponentType|Acc]);
+parse_ComponentTypeList([{',',_}|[{'COMPONENTS',_},{'OF',_}|_]=Tokens0], Acc) when Acc =/= [] ->
+ {ComponentType,Tokens} = parse_ComponentType(Tokens0),
+ parse_ComponentTypeList(Tokens, [ComponentType|Acc]);
parse_ComponentTypeList(Tokens = [{'}',_}|_],Acc) ->
{lists:reverse(Acc),Tokens};
parse_ComponentTypeList(Tokens = [{']',_},{']',_}|_],Acc) ->
@@ -2606,10 +1857,7 @@ parse_ComponentTypeList(Tokens,[]) ->
{ComponentType,Rest} = parse_ComponentType(Tokens),
parse_ComponentTypeList(Rest,[ComponentType]);
parse_ComponentTypeList(Tokens,_) ->
- throw({asn1_error,
- {get_line(hd(Tokens)),get(asn1_module),
- [got,[get_token(hd(Tokens)),get_token(hd(tl(Tokens)))],
- expected,['}',', identifier']]}}).
+ parse_error(Tokens).
parse_ExtensionAdditions(Tokens=[{',',_}|_],Clist) ->
{ExtAddList,Rest2} = parse_ExtensionAdditionList(Tokens,[]),
@@ -2618,46 +1866,36 @@ parse_ExtensionAdditions(Tokens,Clist) ->
%% Empty
{Clist,Tokens}.
-parse_ExtensionAdditionList([{',',_},Id = {identifier,_,_}|Rest],Acc) ->
- {ComponentType,Rest2} = parse_ComponentType([Id|Rest]),
- parse_ExtensionAdditionList(Rest2,[ComponentType|Acc]);
-parse_ExtensionAdditionList([{',',_},C1={'COMPONENTS',_},C2={'OF',_}|Rest],Acc) ->
- {ComponentType,Rest2} = parse_ComponentType([C1,C2|Rest]),
- parse_ExtensionAdditionList(Rest2,[ComponentType|Acc]);
-parse_ExtensionAdditionList([{',',_},C1 = {'[',_},C2 = {'[',_}|Rest],Acc) ->
- {ExtAddGroup,Rest2} = parse_ExtensionAdditionGroup([C1,C2|Rest],[]),
+parse_ExtensionAdditionList([{',',_}|[#identifier{}|_]=Tokens0], Acc) ->
+ {ComponentType,Tokens} = parse_ComponentType(Tokens0),
+ parse_ExtensionAdditionList(Tokens, [ComponentType|Acc]);
+parse_ExtensionAdditionList([{',',_}|[{'COMPONENTS',_},{'OF',_}|_]=Tokens0], Acc) ->
+ {ComponentType,Tokens} = parse_ComponentType(Tokens0),
+ parse_ExtensionAdditionList(Tokens, [ComponentType|Acc]);
+parse_ExtensionAdditionList([{',',_},{'[',_},{'[',_}|Tokens], Acc) ->
+ {ExtAddGroup,Rest2} = parse_ExtensionAdditionGroup(Tokens),
parse_ExtensionAdditionList(Rest2,[ExtAddGroup|Acc]);
-parse_ExtensionAdditionList(Tokens = [{'}',_}|_],Acc) ->
+parse_ExtensionAdditionList([{'}',_}|_]=Tokens, Acc) ->
{lists:reverse(Acc),Tokens};
-parse_ExtensionAdditionList(Tokens = [{',',_},{'...',_}|_],Acc) ->
+parse_ExtensionAdditionList([{',',_},{'...',_}|_]=Tokens, Acc) ->
{lists:reverse(Acc),Tokens};
-parse_ExtensionAdditionList(Tokens,_) ->
- throw({asn1_error,
- {get_line(hd(Tokens)),get(asn1_module),
- [got,[get_token(hd(Tokens)),get_token(hd(tl(Tokens)))],
- expected,['}',', identifier']]}}).
-
+parse_ExtensionAdditionList(Tokens, _) ->
+ parse_error(Tokens).
-parse_ExtensionAdditionGroup([ {'[',_},{'[',_},_VsnNr = {number,_,Num},{':',_}|Rest],[]) ->
- parse_ExtensionAdditionGroup2(Rest,Num);
-parse_ExtensionAdditionGroup([ {'[',_},{'[',_}|Rest],[]) ->
- parse_ExtensionAdditionGroup2(Rest,undefined);
-parse_ExtensionAdditionGroup(Tokens,_) ->
- throw({asn1_error,
- {get_line(hd(Tokens)),get(asn1_module),
- [got,[get_token(hd(Tokens)),get_token(hd(tl(Tokens)))],
- expected,['[[']]}}).
+parse_ExtensionAdditionGroup([{number,_,Num},{':',_}|Tokens]) ->
+ parse_ExtensionAdditionGroup2(Tokens, Num);
+parse_ExtensionAdditionGroup(Tokens) ->
+ parse_ExtensionAdditionGroup2(Tokens, undefined).
-parse_ExtensionAdditionGroup2(Tokens,Num) ->
+parse_ExtensionAdditionGroup2(Tokens, Num) ->
{CompTypeList,Rest} = parse_ComponentTypeList(Tokens,[]),
case Rest of
[{']',_},{']',_}|Rest2] ->
{[{'ExtensionAdditionGroup',Num}|CompTypeList] ++
['ExtensionAdditionGroupEnd'],Rest2};
_ ->
- throw({asn1_error,{get_line(hd(Rest)),get(asn1_module),
- [got,get_token(hd(Rest)),expected,[']]']]}})
+ parse_error(Rest)
end.
@@ -2676,83 +1914,81 @@ parse_ComponentType(Tokens) ->
Result
end.
-
+%%%
+%%% Parse ENUMERATED.
+%%%
-parse_SignedNumber([{number,_,Value}|Rest]) ->
- {Value,Rest};
-parse_SignedNumber([{'-',_},{number,_,Value}|Rest]) ->
- {-Value,Rest};
-parse_SignedNumber(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- [number,'-number']]}}).
-
-parse_Enumerations(Tokens=[{identifier,_,_}|_Rest],ExtensionDefault) ->
- parse_Enumerations(Tokens,[],ExtensionDefault);
-parse_Enumerations([H|_T],_) ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,identifier]}}).
-
-parse_Enumerations(Tokens = [{identifier,_,_},{'(',_}|_Rest], Acc, ExtensionDefault) ->
- {NamedNumber,Rest2} = parse_NamedNumber(Tokens),
- case Rest2 of
- [{',',_}|Rest3] ->
- parse_Enumerations(Rest3,[NamedNumber|Acc], ExtensionDefault);
- _ when ExtensionDefault == 'IMPLIED'->
- {lists:reverse(['EXTENSIONMARK',NamedNumber|Acc]),Rest2};
+parse_Enumerations(Tokens0) ->
+ {Root,Tokens1} = parse_Enumeration(Tokens0),
+ case Tokens1 of
+ [{',',_},{'...',_},{',',_}|Tokens2] ->
+ {Ext,Tokens} = parse_Enumeration(Tokens2),
+ {Root++['EXTENSIONMARK'|Ext],Tokens};
+ [{',',_},{'...',_}|Tokens] ->
+ {Root++['EXTENSIONMARK'],Tokens};
_ ->
- {lists:reverse([NamedNumber|Acc]),Rest2}
- end;
-parse_Enumerations([{identifier,_,Id}|Rest], Acc, ExtensionDefault) ->
- case Rest of
- [{',',_}|Rest2] ->
- parse_Enumerations(Rest2,[Id|Acc], ExtensionDefault);
- _ when ExtensionDefault == 'IMPLIED' ->
- {lists:reverse(['EXTENSIONMARK', Id |Acc]),Rest};
- _ ->
- {lists:reverse([Id|Acc]),Rest}
- end;
-parse_Enumerations([{'...',_}|Rest], Acc, _ExtensionDefault) ->
- case Rest of
- [{',',_}|Rest2] ->
- parse_Enumerations(Rest2,['EXTENSIONMARK'|Acc],undefined);
- _ ->
- {lists:reverse(['EXTENSIONMARK'|Acc]),Rest}
+ case get(extensiondefault) of
+ 'IMPLIED' ->
+ {Root++['EXTENSIONMARK'],Tokens1};
+ _ ->
+ {Root,Tokens1}
+ end
+ end.
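+%% For example, "red, green, ..., blue" gives
+%% [red,green,'EXTENSIONMARK',blue], and "red, green" gives [red,green]
+%% (or [red,green,'EXTENSIONMARK'] when EXTENSIBILITY IMPLIED is in
+%% effect for the module).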
+
+parse_Enumeration(Tokens0) ->
+ {Item,Tokens} = parse_EnumerationItem(Tokens0),
+ parse_Enumeration_1(Tokens, [Item]).
+
+parse_Enumeration_1([{',',_}|Tokens1]=Tokens0, Acc) ->
+ try parse_EnumerationItem(Tokens1) of
+ {Item,Tokens} ->
+ parse_Enumeration_1(Tokens, [Item|Acc])
+ catch
+ throw:{asn1_error,_} ->
+ {lists:reverse(Acc),Tokens0}
end;
-parse_Enumerations([H|_T],_,_) ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,identifier]}}).
+parse_Enumeration_1(Tokens, Acc) ->
+ {lists:reverse(Acc),Tokens}.
+
+parse_EnumerationItem([#identifier{},{'(',_}|_]=Tokens) ->
+ parse_NamedNumber(Tokens);
+parse_EnumerationItem([#identifier{val=Id}|Tokens]) ->
+ {Id,Tokens};
+parse_EnumerationItem(Tokens) ->
+ parse_error(Tokens).
+
+%%%
+%%% End of parsing of ENUMERATED.
+%%%
parse_NamedNumberList(Tokens) ->
- parse_NamedNumberList(Tokens,[]).
+ parse_NamedNumberList(Tokens, []).
-parse_NamedNumberList(Tokens,Acc) ->
+parse_NamedNumberList(Tokens, Acc) ->
{NamedNum,Rest} = parse_NamedNumber(Tokens),
case Rest of
[{',',_}|Rest2] ->
parse_NamedNumberList(Rest2,[NamedNum|Acc]);
_ ->
- {lists:reverse([NamedNum|Acc]),Rest}
+ {lists:reverse(Acc, [NamedNum]),Rest}
end.
-parse_NamedNumber([{identifier,_,Name},{'(',_}|Rest]) ->
+parse_NamedNumber([#identifier{val=Name},{'(',_}|Rest]) ->
Flist = [fun parse_SignedNumber/1,
fun parse_DefinedValue/1],
- case (catch parse_or(Rest,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
+ case parse_or(Rest, Flist) of
{NamedNum,[{')',_}|Rest2]} ->
{{'NamedNumber',Name,NamedNum},Rest2};
_ ->
- throw({asn1_error,{get_line(hd(Rest)),get(asn1_module),
- [got,get_token(hd(Rest)),expected,'NamedNumberList']}})
+ parse_error(Rest)
end;
parse_NamedNumber(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,identifier]}}).
+ parse_error(Tokens).
+parse_SignedNumber([{number,_,Value}|Rest]) ->
+ {Value,Rest};
+parse_SignedNumber(Tokens) ->
+ parse_error(Tokens).
parse_Tag([{'[',_}|Rest]) ->
{Class,Rest2} = parse_Class(Rest),
@@ -2767,12 +2003,8 @@ parse_Tag([{'[',_}|Rest]) ->
[{']',_}|Rest4] ->
{#tag{class=Class,number=ClassNumber},Rest4};
_ ->
- throw({asn1_error,{get_line(hd(Rest3)),get(asn1_module),
- [got,get_token(hd(Rest3)),expected,']']}})
- end;
-parse_Tag(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,'[']}}).
+ parse_error(Rest3)
+ end.
parse_Class([{'UNIVERSAL',_}|Rest]) ->
{'UNIVERSAL',Rest};
@@ -2791,15 +2023,7 @@ parse_Value(Tokens) ->
Flist = [fun parse_BuiltinValue/1,
fun parse_ValueFromObject/1,
fun parse_DefinedValue/1],
-
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
+ parse_or(Tokens, Flist).
parse_BuiltinValue([{bstring,_,Bstr}|Rest]) ->
{{bstring,Bstr},Rest};
@@ -2812,18 +2036,11 @@ parse_BuiltinValue(Tokens = [{'{',_}|_Rest]) ->
fun parse_SequenceOfValue/1,
fun parse_SequenceValue/1,
fun parse_ObjectIdentifierValue/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end;
-parse_BuiltinValue([{identifier,_,IdName},{':',_}|Rest]) ->
+ parse_or(Tokens, Flist);
+parse_BuiltinValue([#identifier{val=IdName},{':',_}|Rest]) ->
{Value,Rest2} = parse_Value(Rest),
{{'CHOICE',{IdName,Value}},Rest2};
-parse_BuiltinValue(Tokens=[{'NULL',_},{':',_}|_Rest]) ->
+parse_BuiltinValue([{'NULL',_},{':',_}|_]=Tokens) ->
parse_ObjectClassFieldValue(Tokens);
parse_BuiltinValue([{'NULL',_}|Rest]) ->
{'NULL',Rest};
@@ -2839,31 +2056,29 @@ parse_BuiltinValue([{cstring,_,Cstr}|Rest]) ->
{Cstr,Rest};
parse_BuiltinValue([{number,_,Num}|Rest]) ->
{Num,Rest};
-parse_BuiltinValue([{'-',_},{number,_,Num}|Rest]) ->
- {- Num,Rest};
parse_BuiltinValue(Tokens) ->
parse_ObjectClassFieldValue(Tokens).
-parse_DefinedValue(Tokens=[{identifier,_,_},{'{',_}|_Rest]) ->
- parse_ParameterizedValue(Tokens);
-%% Externalvaluereference
-parse_DefinedValue([{typereference,L1,Tname},{'.',_},{identifier,_,Idname}|Rest]) ->
+parse_DefinedValue(Tokens) ->
+ Flist = [fun parse_ParameterizedValue/1,
+ fun parse_DefinedValue2/1],
+ parse_or(Tokens, Flist).
+
+parse_DefinedValue2([{typereference,L1,Tname},
+ {'.',_},
+ #identifier{val=Idname}|Rest]) ->
{#'Externalvaluereference'{pos=L1,module=Tname,value=Idname},Rest};
%% valuereference
-parse_DefinedValue([Id = {identifier,_,_}|Rest]) ->
+parse_DefinedValue2([#identifier{}=Id|Rest]) ->
{identifier2Extvalueref(Id),Rest};
-%% ParameterizedValue
-parse_DefinedValue(Tokens) ->
- parse_ParameterizedValue(Tokens).
+parse_DefinedValue2(Tokens) ->
+ parse_error(Tokens).
parse_SequenceValue([{'{',_}|Tokens]) ->
- parse_SequenceValue(Tokens,[]);
-parse_SequenceValue(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,'{']}}).
+ parse_SequenceValue(Tokens, []).
-parse_SequenceValue([{identifier,Pos,IdName}|Rest],Acc) ->
+parse_SequenceValue([#identifier{pos=Pos,val=IdName}|Rest],Acc) ->
{Value,Rest2} = parse_Value(Rest),
SeqTag = #seqtag{pos=Pos,module=get(asn1_module),val=IdName},
case Rest2 of
@@ -2872,18 +2087,13 @@ parse_SequenceValue([{identifier,Pos,IdName}|Rest],Acc) ->
[{'}',_}|Rest3] ->
{lists:reverse(Acc, [{SeqTag,Value}]),Rest3};
_ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'}']}})
+ parse_error(Rest2)
end;
parse_SequenceValue(Tokens,_Acc) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,identifier]}}).
+ parse_error(Tokens).
parse_SequenceOfValue([{'{',_}|Tokens]) ->
- parse_SequenceOfValue(Tokens,[]);
-parse_SequenceOfValue(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,'{']}}).
+ parse_SequenceOfValue(Tokens, []).
parse_SequenceOfValue(Tokens,Acc) ->
{Value,Rest2} = parse_Value(Tokens),
@@ -2891,10 +2101,9 @@ parse_SequenceOfValue(Tokens,Acc) ->
[{',',_}|Rest3] ->
parse_SequenceOfValue(Rest3,[Value|Acc]);
[{'}',_}|Rest3] ->
- {lists:reverse([Value|Acc]),Rest3};
+ {lists:reverse(Acc, [Value]),Rest3};
_ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'}']}})
+ parse_error(Rest2)
end.
parse_ValueSetTypeAssignment([{typereference,L1,Name}|Rest]) ->
@@ -2904,49 +2113,31 @@ parse_ValueSetTypeAssignment([{typereference,L1,Name}|Rest]) ->
{ValueSet,Rest4} = parse_ValueSet(Rest3),
{#valuedef{pos=L1,name=Name,type=Type,value=ValueSet,
module=get(asn1_module)},Rest4};
- [H|_T] ->
- throw({asn1_error,{get_line(L1),get(asn1_module),
- [got,get_token(H),expected,'::=']}})
- end;
-parse_ValueSetTypeAssignment(Tokens) ->
- throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- typereference]}}).
+ _ ->
+ parse_error(Rest2)
+ end.
parse_ValueSet([{'{',_}|Rest]) ->
{Elems,Rest2} = parse_ElementSetSpecs(Rest),
case Rest2 of
[{'}',_}|Rest3] ->
{{valueset,Elems},Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'}']}})
+ _ ->
+ parse_error(Rest2)
end;
parse_ValueSet(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,'{']}}).
+ parse_error(Tokens).
-parse_ValueAssignment([{identifier,L1,IdName}|Rest]) ->
+parse_ValueAssignment([#identifier{pos=L1,val=IdName}|Rest]) ->
{Type,Rest2} = parse_Type(Rest),
case Rest2 of
[{'::=',_}|Rest3] ->
{Value,Rest4} = parse_Value(Rest3),
- case catch lookahead_assignment(Rest4) of
- ok ->
- {#valuedef{pos=L1,name=IdName,type=Type,value=Value,
- module=get(asn1_module)},Rest4};
- Error ->
- throw(Error)
-%% throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
-%% [got,get_token(hd(Rest2)),expected,'::=']}})
- end;
+ {#valuedef{pos=L1,name=IdName,type=Type,value=Value,
+ module=get(asn1_module)},Rest4};
_ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'::=']}})
- end;
-parse_ValueAssignment(Tokens) ->
- throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,identifier]}}).
+ parse_error(Rest2)
+ end.
%% SizeConstraint
parse_SubtypeElements([{'SIZE',_}|Tokens]) ->
@@ -2966,8 +2157,7 @@ parse_SubtypeElements([{'WITH',_},{'COMPONENTS',_},{'{',_},{'...',_},{',',_}|Tok
[{'}',_}|Rest2] ->
{{'WITH COMPONENTS',{'PartialSpecification',Constraint}},Rest2};
_ ->
- throw({asn1_error,{get_line(hd(Rest)),get(asn1_module),
- [got,get_token(hd(Rest)),expected,'}']}})
+ parse_error(Rest)
end;
parse_SubtypeElements([{'WITH',_},{'COMPONENTS',_},{'{',_}|Tokens]) ->
{Constraint,Rest} = parse_TypeConstraints(Tokens),
@@ -2975,28 +2165,18 @@ parse_SubtypeElements([{'WITH',_},{'COMPONENTS',_},{'{',_}|Tokens]) ->
[{'}',_}|Rest2] ->
{{'WITH COMPONENTS',{'FullSpecification',Constraint}},Rest2};
_ ->
- throw({asn1_error,{get_line(hd(Rest)),get(asn1_module),
- [got,get_token(hd(Rest)),expected,'}']}})
+ parse_error(Rest)
end;
parse_SubtypeElements([{'PATTERN',_}|Tokens]) ->
{Value,Rest} = parse_Value(Tokens),
{{pattern,Value},Rest};
-%% SingleValue
-%% ContainedSubtype
-%% ValueRange
-%% TypeConstraint
-%% Moved fun parse_Value/1 and fun parse_Type/1 to parse_Elements
parse_SubtypeElements(Tokens) ->
Flist = [fun parse_ContainedSubtype/1,
fun parse_Value/1,
- fun([{'MIN',_}|T]) -> {'MIN',T} end,
+ fun parse_MIN/1,
fun parse_Type/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- {asn1_error,Reason} ->
- throw(Reason);
- Result = {Val,_} when is_record(Val,type) ->
+ case parse_or(Tokens, Flist) of
+ {#type{},_}=Result ->
Result;
{Lower,[{'..',_}|Rest]} ->
{Upper,Rest2} = parse_UpperEndpoint(Rest),
@@ -3014,10 +2194,7 @@ parse_ContainedSubtype([{'INCLUDES',_}|Rest]) ->
{Type,Rest2} = parse_Type(Rest),
{{'ContainedSubtype',Type},Rest2};
parse_ContainedSubtype(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,'INCLUDES']}}).
-%%parse_ContainedSubtype(Tokens) -> %this option is moved to parse_SubtypeElements
-%% parse_Type(Tokens).
+ parse_error(Tokens).
parse_UpperEndpoint([{'<',_}|Rest]) ->
parse_UpperEndpoint(lt,Rest);
@@ -3025,33 +2202,38 @@ parse_UpperEndpoint(Tokens) ->
parse_UpperEndpoint(false,Tokens).
parse_UpperEndpoint(Lt,Tokens) ->
- Flist = [ fun([{'MAX',_}|T]) -> {'MAX',T} end,
- fun parse_Value/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- {Value,Rest2} when Lt == lt ->
+ Flist = [fun parse_MAX/1,
+ fun parse_Value/1],
+ case parse_or(Tokens, Flist) of
+ {Value,Rest2} when Lt =:= lt ->
{{lt,Value},Rest2};
{Value,Rest2} ->
{Value,Rest2}
end.
+parse_MIN([{'MIN',_}|T]) ->
+ {'MIN',T};
+parse_MIN(Tokens) ->
+ parse_error(Tokens).
+
+parse_MAX([{'MAX',_}|T]) ->
+ {'MAX',T};
+parse_MAX(Tokens) ->
+ parse_error(Tokens).
+
parse_TypeConstraints(Tokens) ->
- parse_TypeConstraints(Tokens,[]).
+ parse_TypeConstraints(Tokens, []).
-parse_TypeConstraints([{identifier,_,_}|Rest],Acc) ->
+parse_TypeConstraints([#identifier{}|Rest], Acc) ->
{ComponentConstraint,Rest2} = parse_ComponentConstraint(Rest),
case Rest2 of
[{',',_}|Rest3] ->
- parse_TypeConstraints(Rest3,[ComponentConstraint|Acc]);
+ parse_TypeConstraints(Rest3, [ComponentConstraint|Acc]);
_ ->
- {lists:reverse([ComponentConstraint|Acc]),Rest2}
+ {lists:reverse(Acc, [ComponentConstraint]),Rest2}
end;
-parse_TypeConstraints([H|_T],_) ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,identifier]}}).
+parse_TypeConstraints(Tokens, _) ->
+ parse_error(Tokens).
parse_ComponentConstraint(Tokens = [{'(',_}|_Rest]) ->
{ValueConstraint,Rest2} = parse_Constraint(Tokens),
@@ -3071,145 +2253,36 @@ parse_PresenceConstraint(Tokens) ->
{asn1_empty,Tokens}.
-% merge_constraints({Rlist,ExtList}) -> % extensionmarker in constraint
-% {merge_constraints(Rlist,[],[]),
-% merge_constraints(ExtList,[],[])};
-
-%% An arg with a constraint with extension marker will look like
-%% [#constraint{c={Root,Ext}}|Rest]
-
merge_constraints(Clist) ->
merge_constraints(Clist, [], []).
-merge_constraints([Ch|Ct],Cacc, Eacc) ->
- NewEacc = case Ch#constraint.e of
- undefined -> Eacc;
- E -> [E|Eacc]
- end,
- merge_constraints(Ct,[fixup_constraint(Ch#constraint.c)|Cacc],NewEacc);
-
-merge_constraints([],Cacc,[]) ->
-%% lists:flatten(Cacc);
+merge_constraints([#constraint{c=C,e=E}|T], Cacc0, Eacc0) ->
+ Eacc = case E of
+ undefined -> Eacc0;
+ E -> [E|Eacc0]
+ end,
+ Cacc = [C|Cacc0],
+ merge_constraints(T, Cacc, Eacc);
+merge_constraints([], Cacc, []) ->
lists:reverse(Cacc);
-merge_constraints([],Cacc,Eacc) ->
-%% lists:flatten(Cacc) ++ [{'Errors',Eacc}].
- lists:reverse(Cacc) ++ [{'Errors',Eacc}].
-
-
-fixup_constraint(C) ->
- case C of
- {'SingleValue',SubType} when element(1,SubType) == 'ContainedSubtype' ->
- SubType;
- {'SingleValue',V} when is_list(V) ->
- C;
- %% [C,{'ValueRange',{lists:min(V),lists:max(V)}}];
- %% bug, turns wrong when an element in V is a reference to a defined value
- {'PermittedAlphabet',{'SingleValue',V}} when is_list(V) ->
- %%sort and remove duplicates
- V2 = {'SingleValue',
- ordsets:from_list(lists:flatten(V))},
- {'PermittedAlphabet',V2};
- {'PermittedAlphabet',{'SingleValue',V}} ->
- V2 = {'SingleValue',[V]},
- {'PermittedAlphabet',V2};
- {'SizeConstraint',Sc} ->
- {'SizeConstraint',fixup_size_constraint(Sc)};
-
- List when is_list(List) -> %% In This case maybe a union or intersection
- [fixup_constraint(Xc)||Xc <- List];
- Other ->
- Other
- end.
+merge_constraints([], Cacc, Eacc) ->
+ lists:reverse(Cacc) ++ [{element_set,{'Errors',Eacc},none}].
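+%% For example, [#constraint{c=C1,e=undefined},#constraint{c=C2,e=E}]
+%% is merged into [C1,C2,{element_set,{'Errors',[E]},none}]; without
+%% any exception specifications the result is just [C1,C2].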
-fixup_size_constraint({'ValueRange',{Lb,Ub}}) ->
- {Lb,Ub};
-fixup_size_constraint({{'ValueRange',R},[]}) ->
- {R,[]};
-fixup_size_constraint({[],{'ValueRange',R}}) ->
- {[],R};
-fixup_size_constraint({{'ValueRange',R1},{'ValueRange',R2}}) ->
- {R1,R2};
-fixup_size_constraint({'SingleValue',[Sv]}) ->
- fixup_size_constraint({'SingleValue',Sv});
-fixup_size_constraint({'SingleValue',L}) when is_list(L) ->
- ordsets:from_list(L);
-fixup_size_constraint({'SingleValue',L}) ->
- {L,L};
-fixup_size_constraint({'SizeConstraint',C}) ->
- %% this is a second SIZE
- fixup_size_constraint(C);
-fixup_size_constraint({C1,C2}) ->
- %% this is with extension marks
- {turn2vr(fixup_size_constraint(C1)), extension_size(fixup_size_constraint(C2))};
-fixup_size_constraint(CList) when is_list(CList) ->
- [fixup_constraint(Xc)||Xc <- CList].
-
-turn2vr(L) when is_list(L) ->
- L2 =[X||X<-ordsets:from_list(L),is_integer(X)],
- case L2 of
- [H|_] ->
- {H,hd(lists:reverse(L2))};
- _ ->
- L
- end;
-turn2vr(VR) ->
- VR.
-extension_size({I,I}) ->
- [I];
-extension_size({I1,I2}) ->
- [I1,I2];
-extension_size(C) ->
- C.
-
-get_line({_,Pos,Token}) when is_integer(Pos),is_atom(Token) ->
+get_line({Token,Pos,_}) when is_integer(Pos), is_atom(Token) ->
Pos;
get_line({Token,Pos}) when is_integer(Pos),is_atom(Token) ->
- Pos;
-get_line(_) ->
- undefined.
-
-get_token({_,Pos,Token}) when is_integer(Pos),is_atom(Token) ->
- Token;
+ Pos.
+
+get_token({valuefieldreference,_,FieldName}) ->
+ list_to_atom([$&|atom_to_list(FieldName)]);
+get_token({typefieldreference,_,FieldName}) ->
+ list_to_atom([$&|atom_to_list(FieldName)]);
+get_token({Token,Pos,Value}) when is_integer(Pos), is_atom(Token) ->
+ Value;
get_token({'$end',Pos}) when is_integer(Pos) ->
- undefined;
+ 'END-OF-FILE';
get_token({Token,Pos}) when is_integer(Pos),is_atom(Token) ->
- Token;
-get_token(_) ->
- undefined.
-
-prioritize_error(ErrList) ->
- case lists:keymember(asn1_error,1,ErrList) of
- false -> % only asn1_assignment_error -> take the last
- lists:last(ErrList);
- true -> % contains errors from deeper in a Type
- NewErrList = [_Err={_,_}|_RestErr] =
- lists:filter(fun({asn1_error,_})->true;(_)->false end,
- ErrList),
- SplitErrs =
- lists:splitwith(fun({_,X})->
- case element(1,X) of
- Int when is_integer(Int) -> true;
- _ -> false
- end
- end,
- NewErrList),
- case SplitErrs of
- {[],UndefPosErrs} -> % if no error with Positon exists
- lists:last(UndefPosErrs);
- {IntPosErrs,_} ->
- IntPosReasons = lists:map(fun(X)->element(2,X) end,IntPosErrs),
- SortedReasons = lists:keysort(1,IntPosReasons),
- {asn1_error,lists:last(SortedReasons)}
- end
- end.
-
-%% most_prio_error([H={_,Reason}|T],Atom,Err) when is_atom(Atom) ->
-%% most_prio_error(T,element(1,Reason),H);
-%% most_prio_error([H={_,Reason}|T],Greatest,Err) ->
-%% case element(1,Reason) of
-%% Pos when is_integer(Pos),Pos>Greatest ->
-%% most_prio_error(
-
+ Token.
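+%% For example, get_token({typefieldreference,42,'Type'}) returns
+%% '&Type', and get_token({'$end',42}) returns 'END-OF-FILE'.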
tref2Exttref(#typereference{pos=Pos,val=Name}) ->
#'Externaltypereference'{pos=Pos,
@@ -3226,19 +2299,5 @@ identifier2Extvalueref(#identifier{pos=Pos,val=Name}) ->
module=resolve_module(Name),
value=Name}.
-%% lookahead_assignment/1 checks that the next sequence of tokens
-%% in Token contain a valid assignment or the
-%% 'END' token. Otherwise an exception is thrown.
-lookahead_assignment([{'END',_}|_Rest]) ->
- ok;
-lookahead_assignment(Tokens) ->
- parse_Assignment(Tokens),
- ok.
-
-is_pre_defined_class('TYPE-IDENTIFIER') ->
- true;
-is_pre_defined_class('ABSTRACT-SYNTAX') ->
- true;
-is_pre_defined_class(_) ->
- false.
-
+parse_error(Tokens) ->
+ throw({asn1_error,{parse_error,Tokens}}).
diff --git a/lib/asn1/src/asn1ct_tok.erl b/lib/asn1/src/asn1ct_tok.erl
index 8687ed955c..d51fea6402 100644
--- a/lib/asn1/src/asn1ct_tok.erl
+++ b/lib/asn1/src/asn1ct_tok.erl
@@ -21,191 +21,177 @@
%% Tokenize ASN.1 code (input to parser generated with yecc)
--export([get_name/2,tokenise/4, file/1]).
+-export([file/1,format_error/1]).
-
-file(File) ->
- case file:open(File, [read]) of
+file(File0) ->
+ case file:open(File0, [read]) of
{error, Reason} ->
- {error,{File,file:format_error(Reason)}};
+ {error,{File0,file:format_error(Reason)}};
{ok,Stream} ->
- process(Stream,0,[])
+ try
+ process(Stream, 1, [])
+ catch
+ throw:{error,Line,Reason} ->
+ File = filename:basename(File0),
+ Error = {structured_error,{File,Line},?MODULE,Reason},
+ {error,[Error]}
+ end
end.
-process(Stream,Lno,R) ->
- process(io:get_line(Stream, ''), Stream,Lno+1,R).
+process(Stream, Lno, R) ->
+ process(io:get_line(Stream, ''), Stream, Lno, R).
-process(eof, Stream,Lno,R) ->
+process(eof, Stream, Lno, Acc) ->
ok = file:close(Stream),
- lists:flatten(lists:reverse([{'$end',Lno}|R]));
-
-
-process(L, Stream,Lno,R) when is_list(L) ->
- %%io:format('read:~s',[L]),
- case catch tokenise(Stream,L,Lno,[]) of
- {'ERR',Reason} ->
- io:format("Tokeniser error on line: ~w ~w~n",[Lno,Reason]),
- exit(0);
- {NewLno,T} ->
- %%io:format('toks:~w~n',[T]),
- process(Stream,NewLno,[T|R])
- end.
-
-tokenise(Stream,[H|T],Lno,R) when $a =< H , H =< $z ->
- {X, T1} = get_name(T, [H]),
- tokenise(Stream,T1,Lno,[{identifier,Lno, list_to_atom(X)}|R]);
-
-tokenise(Stream,[$&,H|T],Lno,R) when $A =< H , H =< $Z ->
- {Y, T1} = get_name(T, [H]),
- X = list_to_atom(Y),
- tokenise(Stream,T1,Lno,[{typefieldreference, Lno, X} | R]);
-
-tokenise(Stream,[$&,H|T],Lno,R) when $a =< H , H =< $z ->
- {Y, T1} = get_name(T, [H]),
- X = list_to_atom(Y),
- tokenise(Stream,T1,Lno,[{valuefieldreference, Lno, X} | R]);
-
-tokenise(Stream,[H|T],Lno,R) when $A =< H , H =< $Z ->
- {Y, T1} = get_name(T, [H]),
- X = list_to_atom(Y),
- case reserved_word(X) of
- true ->
- tokenise(Stream,T1,Lno,[{X,Lno}|R]);
- false ->
- tokenise(Stream,T1,Lno,[{typereference,Lno,X}|R]);
- rstrtype ->
- tokenise(Stream,T1,Lno,[{restrictedcharacterstringtype,Lno,X}|R])
- end;
-
-tokenise(Stream,[$-,H|T],Lno,R) when $0 =< H , H =< $9 ->
- {X, T1} = get_number(T, [H]),
- tokenise(Stream,T1,Lno,[{number,Lno,-1 * list_to_integer(X)}|R]);
+ lists:reverse([{'$end',Lno}|Acc]);
+process(L, Stream, Lno0, Acc) when is_list(L) ->
+ try tokenise(Stream, L, Lno0, []) of
+ {Lno,[]} ->
+ process(Stream, Lno, Acc);
+ {Lno,Ts} ->
+ process(Stream, Lno, Ts++Acc)
+ catch
+ throw:{error,Reason} ->
+ throw({error,Lno0,Reason})
+ end.
-tokenise(Stream,[H|T],Lno,R) when $0 =< H , H =< $9 ->
+format_error(eof_in_comment) ->
+ "premature end of file in multi-line comment";
+format_error(eol_in_token) ->
+ "end of line in token";
+format_error({invalid_binary_number,Str}) ->
+ io_lib:format("invalid binary number: '~s'", [Str]);
+format_error({invalid_hex_number,Str}) ->
+ io_lib:format("invalid hex number: '~s'", [Str]);
+format_error(Other) ->
+ io_lib:format("~p", [Other]).
+
+tokenise(Stream, [$&,H|T], Lno, R) when $A =< H , H =< $Z ->
+ {X,T1} = get_name(T, [H]),
+ tokenise(Stream, T1, Lno, [{typefieldreference,Lno,X}|R]);
+tokenise(Stream, [$&,H|T], Lno, R) when $a =< H , H =< $z ->
+ {X,T1} = get_name(T, [H]),
+ tokenise(Stream, T1, Lno, [{valuefieldreference,Lno,X}|R]);
+
+tokenise(Stream, "--"++T, Lno, R) ->
+ tokenise(Stream, skip_comment(T), Lno, R);
+
+tokenise(Stream, [$-,H|T], Lno, R) when $0 =< H , H =< $9 ->
{X, T1} = get_number(T, [H]),
- tokenise(Stream,T1,Lno,[{number,Lno,list_to_integer(X)}|R]);
-
-tokenise(Stream,[$-,$-|T],Lno,R) ->
- tokenise(Stream,skip_comment(T),Lno,R);
+ tokenise(Stream, T1, Lno, [{number,Lno,-list_to_integer(X)}|R]);
-tokenise(Stream,[$/,$*|T],Lno,R) ->
- {NewLno,T1} = skip_multiline_comment(Stream,T,Lno,0),
- tokenise(Stream,T1,NewLno,R);
+tokenise(Stream, "/*"++T, Lno0, R) ->
+ {Lno,T1} = skip_multiline_comment(Stream, T, Lno0, 0),
+ tokenise(Stream, T1, Lno, R);
-tokenise(Stream,[$:,$:,$=|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{'::=',Lno}|R]);
-
-tokenise(Stream,[$'|T],Lno,R) ->
- case catch collect_quoted(T,Lno,[]) of
- {'ERR',_} ->
- throw({'ERR','bad_quote'});
- {Thing, T1} ->
- tokenise(Stream,T1,Lno,[Thing|R])
- end;
+tokenise(Stream, "::="++T, Lno, R) ->
+ tokenise(Stream, T, Lno, [{'::=',Lno}|R]);
+tokenise(Stream, ":"++T, Lno, R) ->
+ tokenise(Stream, T, Lno, [{':',Lno}|R]);
+tokenise(Stream, "'"++T0, Lno, R) ->
+ {Thing, T1} = collect_quoted(T0, Lno, []),
+ tokenise(Stream, T1, Lno, [Thing|R]);
tokenise(Stream,[$"|T],Lno,R) ->
{Str,T1} = collect_string(T,Lno),
tokenise(Stream,T1,Lno,[Str|R]);
-tokenise(Stream,[${|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{'{',Lno}|R]);
-
-tokenise(Stream,[$}|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{'}',Lno}|R]);
-
-%% tokenise(Stream,[$],$]|T],Lno,R) ->
-%% tokenise(Stream,T,Lno,[{']]',Lno}|R]);
+tokenise(Stream, "{"++T, Lno, R) ->
+ tokenise(Stream, T, Lno, [{'{',Lno}|R]);
+tokenise(Stream, "}"++T, Lno, R) ->
+ tokenise(Stream, T, Lno, [{'}',Lno}|R]);
%% Even though x.680 specify '[[' and ']]' as lexical items
-%% it does not work to have them as such since the single [ and ] can
-%% be used beside each other in the SYNTAX OF in x.681
-%% the solution chosen here , i.e. to have them as separate lexical items
+%% it does not work to have them as such since the single '[' and ']' can
+%% be used beside each other in 'WITH SYNTAX' in x.681.
+%% The solution chosen here, i.e. to have them as separate lexical items,
%% will not detect the cases where there is white space between them
-%% which would be an error in the use in ExtensionAdditionGroups
-
-%% tokenise(Stream,[$[,$[|T],Lno,R) ->
-%% tokenise(Stream,T,Lno,[{'[[',Lno}|R]);
-
-tokenise(Stream,[$]|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{']',Lno}|R]);
-
-tokenise(Stream,[$[|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{'[',Lno}|R]);
+%% which would be an error when used in ExtensionAdditionGroups.
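+%% Thus "[[" is tokenised as two consecutive '[' tokens, and the parser
+%% matches them as a pair (see, for example,
+%% parse_ExtensionAdditionAlternative/1 in asn1ct_parser2.erl).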
-tokenise(Stream,[$,|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{',',Lno}|R]);
+tokenise(Stream, "]"++T, Lno, R) ->
+ tokenise(Stream, T, Lno, [{']',Lno}|R]);
+tokenise(Stream, "["++T, Lno, R) ->
+ tokenise(Stream, T, Lno, [{'[',Lno}|R]);
-tokenise(Stream,[$(|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{'(',Lno}|R]);
-tokenise(Stream,[$)|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{')',Lno}|R]);
+tokenise(Stream, ","++T, Lno, R) ->
+ tokenise(Stream, T, Lno, [{',',Lno}|R]);
-tokenise(Stream,[$.,$.,$.|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{'...',Lno}|R]);
+tokenise(Stream, "("++T, Lno, R) ->
+ tokenise(Stream, T, Lno, [{'(',Lno}|R]);
+tokenise(Stream, ")"++T, Lno, R) ->
+ tokenise(Stream, T, Lno, [{')',Lno}|R]);
-tokenise(Stream,[$.,$.|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{'..',Lno}|R]);
+tokenise(Stream, "..."++T, Lno, R) ->
+ tokenise(Stream, T, Lno, [{'...',Lno}|R]);
+tokenise(Stream, ".."++T, Lno, R) ->
+ tokenise(Stream, T, Lno, [{'..',Lno}|R]);
+tokenise(Stream, "."++T, Lno, R) ->
+ tokenise(Stream, T, Lno, [{'.',Lno}|R]);
-tokenise(Stream,[$.|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{'.',Lno}|R]);
-tokenise(Stream,[$^|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{'^',Lno}|R]);
-tokenise(Stream,[$!|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{'!',Lno}|R]);
-tokenise(Stream,[$||T],Lno,R) ->
- tokenise(Stream,T,Lno,[{'|',Lno}|R]);
+tokenise(Stream, "|"++T, Lno, R) ->
+ tokenise(Stream, T, Lno, [{'|',Lno}|R]);
-tokenise(Stream,[H|T],Lno,R) ->
- case white_space(H) of
+tokenise(Stream, [H|T], Lno, R) when $A =< H , H =< $Z ->
+ {X,T1} = get_name(T, [H]),
+ case reserved_word(X) of
true ->
- tokenise(Stream,T,Lno,R);
+ tokenise(Stream, T1, Lno, [{X,Lno}|R]);
false ->
- tokenise(Stream,T,Lno,[{list_to_atom([H]),Lno}|R])
+ tokenise(Stream, T1, Lno, [{typereference,Lno,X}|R]);
+ rstrtype ->
+ tokenise(Stream, T1, Lno, [{restrictedcharacterstringtype,Lno,X}|R])
end;
-tokenise(_Stream,[],Lno,R) ->
- {Lno,lists:reverse(R)}.
+tokenise(Stream, [H|T], Lno, R) when $a =< H , H =< $z ->
+ {X, T1} = get_name(T, [H]),
+ tokenise(Stream, T1, Lno, [{identifier,Lno,X}|R]);
-collect_string(L,Lno) ->
- collect_string(L,Lno,[]).
+tokenise(Stream, [H|T], Lno, R) when $0 =< H , H =< $9 ->
+ {X, T1} = get_number(T, [H]),
+ tokenise(Stream, T1, Lno, [{number,Lno,list_to_integer(X)}|R]);
-collect_string([],_,_) ->
- throw({'ERR','bad_quote found eof'});
+tokenise(Stream, [H|T], Lno, R) when H =< $\s ->
+ tokenise(Stream, T, Lno, R);
-collect_string([H|T],Lno,Str) ->
- case H of
- $" ->
- {{cstring,1,lists:reverse(Str)},T};
- Ch ->
- collect_string(T,Lno,[Ch|Str])
- end.
-
+tokenise(Stream, [H|T], Lno, R) ->
+ tokenise(Stream, T, Lno, [{list_to_atom([H]),Lno}|R]);
+tokenise(_Stream, [], Lno, R) ->
+ {Lno+1,R}.
-% <name> is letters digits hyphens
-% hypen is not the last character. Hypen hyphen is NOT allowed
-%
-% <identifier> ::= <lowercase> <name>
+collect_string(L, Lno) ->
+ collect_string(L, Lno, []).
-get_name([$-,Char|T], L) ->
+collect_string([$"|T], _Lno, Str) ->
+ {{cstring,1,lists:reverse(Str)},T};
+collect_string([H|T], Lno, Str) ->
+ collect_string(T, Lno, [H|Str]);
+collect_string([], _, _) ->
+ throw({error,missing_quote_at_eof}).
+
+%% <name> is letters, digits, and hyphens.
+%% A hyphen must not be the last character. Two consecutive hyphens are NOT allowed.
+%%
+%% <identifier> ::= <lowercase> <name>
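+%%
+%% For example, get_name("bc-d ::= X", "a") returns {'abc-d'," ::= X"};
+%% the name is accumulated in reverse and converted with list_to_atom/1.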
+
+get_name([$-,Char|T]=T0, Acc) ->
case isalnum(Char) of
true ->
- get_name(T,[Char,$-|L]);
+ get_name(T, [Char,$-|Acc]);
false ->
- {lists:reverse(L),[$-,Char|T]}
+ {list_to_atom(lists:reverse(Acc)),T0}
end;
-get_name([$-|T], L) ->
- {lists:reverse(L),[$-|T]};
-get_name([Char|T], L) ->
+get_name([$-|_]=T, Acc) ->
+ {list_to_atom(lists:reverse(Acc)),T};
+get_name([Char|T]=T0, Acc) ->
case isalnum(Char) of
true ->
- get_name(T,[Char|L]);
+ get_name(T, [Char|Acc]);
false ->
- {lists:reverse(L),[Char|T]}
+ {list_to_atom(lists:reverse(Acc)),T0}
end;
-get_name([], L) ->
- {lists:reverse(L), []}.
-
+get_name([], Acc) ->
+ {list_to_atom(lists:reverse(Acc)),[]}.
isalnum(H) when $A =< H , H =< $Z ->
true;
@@ -221,67 +207,54 @@ isdigit(H) when $0 =< H , H =< $9 ->
isdigit(_) ->
false.
-white_space(9) -> true;
-white_space(10) -> true;
-white_space(13) -> true;
-white_space(32) -> true;
-white_space(_) -> false.
-
-
-get_number([H|T], L) ->
+get_number([H|T]=T0, L) ->
case isdigit(H) of
true ->
get_number(T, [H|L]);
false ->
- {lists:reverse(L), [H|T]}
+ {lists:reverse(L), T0}
end;
get_number([], L) ->
{lists:reverse(L), []}.
-skip_comment([]) ->
- [];
-skip_comment([$-,$-|T]) ->
- T;
-skip_comment([_|T]) ->
- skip_comment(T).
-
+skip_comment([]) -> [];
+skip_comment("--"++T) -> T;
+skip_comment([_|T]) -> skip_comment(T).
-skip_multiline_comment(Stream,[],Lno,Level) ->
- case io:get_line(Stream,'') of
+skip_multiline_comment(Stream, [], Lno, Level) ->
+ case io:get_line(Stream, '') of
eof ->
- io:format("Tokeniser error on line: ~w~n"
- "premature end of multiline comment~n",[Lno]),
- exit(0);
+ throw({error,eof_in_comment});
Line ->
- skip_multiline_comment(Stream,Line,Lno+1,Level)
+ skip_multiline_comment(Stream, Line, Lno+1, Level)
end;
-skip_multiline_comment(_Stream,[$*,$/|T],Lno,0) ->
+skip_multiline_comment(_Stream, "*/"++T, Lno, 0) ->
{Lno,T};
-skip_multiline_comment(Stream,[$*,$/|T],Lno,Level) ->
- skip_multiline_comment(Stream,T,Lno,Level - 1);
-skip_multiline_comment(Stream,[$/,$*|T],Lno,Level) ->
- skip_multiline_comment(Stream,T,Lno,Level + 1);
-skip_multiline_comment(Stream,[_|T],Lno,Level) ->
- skip_multiline_comment(Stream,T,Lno,Level).
-
-collect_quoted([$',$B|T],Lno, L) ->
+skip_multiline_comment(Stream, "*/"++T, Lno, Level) ->
+ skip_multiline_comment(Stream, T, Lno, Level - 1);
+skip_multiline_comment(Stream, "/*"++T, Lno, Level) ->
+ skip_multiline_comment(Stream, T, Lno, Level + 1);
+skip_multiline_comment(Stream, [_|T], Lno, Level) ->
+ skip_multiline_comment(Stream, T, Lno, Level).
+
+collect_quoted("'B"++T, Lno, L) ->
case check_bin(L) of
true ->
- {{bstring,Lno, lists:reverse(L)}, T};
+ {{bstring,Lno,lists:reverse(L)}, T};
false ->
- throw({'ERR',{invalid_binary_number, lists:reverse(L)}})
+ throw({error,{invalid_binary_number,lists:reverse(L)}})
end;
-collect_quoted([$',$H|T],Lno, L) ->
+collect_quoted("'H"++T, Lno, L) ->
case check_hex(L) of
true ->
- {{hstring,Lno, lists:reverse(L)}, T};
+ {{hstring,Lno,lists:reverse(L)}, T};
false ->
- throw({'ERR',{invalid_binary_number, lists:reverse(L)}})
+ throw({error,{invalid_hex_number,lists:reverse(L)}})
end;
collect_quoted([H|T], Lno, L) ->
collect_quoted(T, Lno,[H|L]);
collect_quoted([], _, _) -> % This should be allowed FIX later
- throw({'ERR',{eol_in_token}}).
+ throw({error,eol_in_token}).
check_bin([$0|T]) ->
check_bin(T);
@@ -351,7 +324,6 @@ reserved_word('INCLUDES') -> true;
reserved_word('INSTANCE') -> true;
reserved_word('INTEGER') -> true;
reserved_word('INTERSECTION') -> true;
-reserved_word('ISO646String') -> rstrtype;
reserved_word('MAX') -> true;
reserved_word('MIN') -> true;
reserved_word('MINUS-INFINITY') -> true;
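For orientation while reading the tokeniser hunks above, here is a minimal standalone sketch (not part of the patch; the module name and classify/1 are illustrative) of the classification rules the rewritten tokenise/4 clauses implement: an initial upper-case letter yields a typereference or a reserved word, a lower-case letter an identifier, a digit a number, whitespace is skipped, and any other character becomes a single-character token.

-module(tok_sketch).
-export([classify/1]).

%% classify("Foo-1 rest") -> {typereference,"Foo-1"}
%% classify("fooBar")     -> {identifier,"fooBar"}
%% classify("123abc")     -> {number,123}
classify([H|T]) when $A =< H, H =< $Z ->
    {Name,_Rest} = get_name(T, [H]),
    {typereference,Name};
classify([H|T]) when $a =< H, H =< $z ->
    {Name,_Rest} = get_name(T, [H]),
    {identifier,Name};
classify([H|T]) when $0 =< H, H =< $9 ->
    {Digits,_Rest} = get_number(T, [H]),
    {number,list_to_integer(Digits)};
classify([H|T]) when H =< $\s ->
    classify(T);
classify([H|_]) ->
    {single_char,list_to_atom([H])};
classify([]) ->
    eof.

%% Same shape as get_name/2 in the patch, but the name is returned as a
%% string rather than an atom to keep the sketch free of atom creation.
get_name([$-,C|T]=T0, Acc) ->
    case isalnum(C) of
        true  -> get_name(T, [C,$-|Acc]);
        false -> {lists:reverse(Acc),T0}
    end;
get_name([$-|_]=T0, Acc) ->
    {lists:reverse(Acc),T0};
get_name([C|T]=T0, Acc) ->
    case isalnum(C) of
        true  -> get_name(T, [C|Acc]);
        false -> {lists:reverse(Acc),T0}
    end;
get_name([], Acc) ->
    {lists:reverse(Acc),[]}.

get_number([C|T], Acc) when $0 =< C, C =< $9 ->
    get_number(T, [C|Acc]);
get_number(T, Acc) ->
    {lists:reverse(Acc),T}.

isalnum(C) when $A =< C, C =< $Z; $a =< C, C =< $z; $0 =< C, C =< $9 ->
    true;
isalnum(_) ->
    false.

Called on the example strings in the comments it returns the tuples shown; the real tokenise/4 additionally threads the Stream and line number and consults reserved_word/1 for the upper-case case.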
diff --git a/lib/asn1/test/Makefile b/lib/asn1/test/Makefile
index b1b08aa9f9..ea5a0f857e 100644
--- a/lib/asn1/test/Makefile
+++ b/lib/asn1/test/Makefile
@@ -78,6 +78,7 @@ MODULES= \
testEnumExt \
testInfObjectClass \
testInfObj \
+ testInfObjExtract \
testParameterizedInfObj \
testFragmented \
testMergeCompile \
@@ -104,14 +105,19 @@ MODULES= \
test_compile_options \
testDoubleEllipses \
test_modified_x420 \
- testX420 \
test_x691 \
testWSParamClass \
+ testValueTest \
+ testUniqueObjectSets \
+ testRfcs \
+ testImporting \
+ testExtensibilityImplied \
asn1_test_lib \
asn1_app_test \
asn1_appup_test \
asn1_SUITE \
- error_SUITE
+ error_SUITE \
+ syntax_SUITE
ERL_FILES= $(MODULES:%=%.erl)
diff --git a/lib/asn1/test/asn1_SUITE.erl b/lib/asn1/test/asn1_SUITE.erl
index 432197eec0..9dfcc3f571 100644
--- a/lib/asn1/test/asn1_SUITE.erl
+++ b/lib/asn1/test/asn1_SUITE.erl
@@ -52,9 +52,7 @@ all() ->
groups() ->
Parallel = asn1_test_lib:parallel(),
[{compile, Parallel,
- [c_syntax,
- c_string,
- c_implicit_before_choice,
+ [c_string,
constraint_equivalence]},
{ber, Parallel,
@@ -89,6 +87,7 @@ groups() ->
ber_other,
der,
h323test]},
+ testExtensibilityImplied,
testChoPrim,
testChoExtension,
testChoOptional,
@@ -135,19 +134,19 @@ groups() ->
testChoiceIndefinite,
per_open_type,
testInfObjectClass,
+ testUniqueObjectSets,
+ testInfObjExtract,
testParam,
testFragmented,
testMergeCompile,
testobj,
testDeepTConstr,
- testExport,
testImport,
testDER,
testDEFAULT,
testMvrasn6,
testContextSwitchingTypes,
testOpenTypeImplicitTag,
- duplicate_tags,
testROSE,
testINSTANCE_OF,
testTCAP,
@@ -158,16 +157,19 @@ groups() ->
testNortel,
% Uses 'PKCS7', 'InformationFramework'
{group, [], [test_WS_ParamClass,
- test_modified_x420,
- testX420]},
- testTcapsystem,
- testNBAPsystem,
- testS1AP,
+ test_modified_x420]},
+ %% Don't run all these at the same time.
+ {group, [],
+ [testTcapsystem,
+ testNBAPsystem,
+ testS1AP,
+ testRfcs]},
test_compile_options,
testDoubleEllipses,
test_x691,
ticket_6143,
- test_OTP_9688]},
+ test_OTP_9688,
+ testValueTest]},
{performance, [],
[testTimer_ber,
@@ -196,7 +198,7 @@ init_per_testcase(Func, Config) ->
true = code:add_patha(CaseDir),
Dog = case Func of
- testX420 -> ct:timetrap({minutes, 90});
+ testRfcs -> ct:timetrap({minutes, 90});
_ -> ct:timetrap({minutes, 60})
end,
[{case_dir, CaseDir}, {watchdog, Dog}|Config].
@@ -374,6 +376,12 @@ testExternal(Config, Rule, Opts) ->
testSetOfTag:main(Rule),
testSetTag:main(Rule).
+testExtensibilityImplied(Config) ->
+ test(Config, fun testExtensibilityImplied/3).
+testExtensibilityImplied(Config, Rule, Opts) ->
+ asn1_test_lib:compile("ExtensibilityImplied", Config,
+ [Rule,no_ok_wrapper|Opts]),
+ testExtensibilityImplied:main().
testChoPrim(Config) -> test(Config, fun testChoPrim/3).
testChoPrim(Config, Rule, Opts) ->
@@ -561,39 +569,21 @@ testSetOfCho(Config, Rule, Opts) ->
asn1_test_lib:compile("SetOfCho", Config, [Rule|Opts]),
testSetOfCho:main(Rule).
-c_syntax(Config) ->
- DataDir = ?config(data_dir, Config),
- [{error, _} = asn1ct:compile(filename:join(DataDir, F))
- || F <-["Syntax",
- "BadTypeEnding",
- "BadValueAssignment1",
- "BadValueAssignment2",
- "BadValueSet",
- "ChoiceBadExtension",
- "EnumerationBadExtension",
- "Example",
- "Export1",
- "MissingEnd",
- "SequenceBadComma",
- "SequenceBadComponentName",
- "SequenceBadComponentType",
- "SeqBadComma"]].
-
c_string(Config) ->
test(Config, fun c_string/3).
c_string(Config, Rule, Opts) ->
asn1_test_lib:compile("String", Config, [Rule|Opts]),
asn1ct:test('String').
-c_implicit_before_choice(Config) ->
- test(Config, fun c_implicit_before_choice/3, [ber]).
-c_implicit_before_choice(Config, Rule, Opts) ->
- DataDir = ?config(data_dir, Config),
- CaseDir = ?config(case_dir, Config),
- {error, _R2} = asn1ct:compile(filename:join(DataDir, "CCSNARG3"),
- [Rule, {outdir, CaseDir}|Opts]).
-
constraint_equivalence(Config) ->
+ constraint_equivalence_abs(Config),
+ test(Config, fun constraint_equivalence/3).
+
+constraint_equivalence(Config, Rule, Opts) ->
+ M = 'ConstraintEquivalence',
+ asn1_test_lib:compile(M, Config, [Rule|Opts]).
+
+constraint_equivalence_abs(Config) ->
DataDir = ?config(data_dir, Config),
CaseDir = ?config(case_dir, Config),
Asn1Spec = "ConstraintEquivalence",
@@ -765,6 +755,16 @@ testInfObjectClass(Config, Rule, Opts) ->
testInfObjectClass:main(Rule),
testInfObj:main(Rule).
+testUniqueObjectSets(Config) -> test(Config, fun testUniqueObjectSets/3).
+testUniqueObjectSets(Config, Rule, Opts) ->
+ CaseDir = ?config(case_dir, Config),
+ testUniqueObjectSets:main(CaseDir, Rule, Opts).
+
+testInfObjExtract(Config) -> test(Config, fun testInfObjExtract/3).
+testInfObjExtract(Config, Rule, Opts) ->
+ asn1_test_lib:compile("InfObjExtract", Config, [Rule|Opts]),
+ testInfObjExtract:main().
+
testParam(Config) ->
test(Config, fun testParam/3, [ber,{ber,[der]},per,uper]).
testParam(Config, Rule, Opts) ->
@@ -804,18 +804,14 @@ testDeepTConstr(Config, Rule, Opts) ->
[Rule|Opts]),
testDeepTConstr:main(Rule).
-testExport(Config) ->
- {error, _} =
- asn1ct:compile(filename:join(?config(data_dir, Config),
- "IllegalExport"),
- [{outdir, ?config(case_dir, Config)}]).
-
testImport(Config) ->
test(Config, fun testImport/3).
testImport(Config, Rule, Opts) ->
- Files = ["ImportsFrom","ImportsFrom2","ImportsFrom3"],
+ Files = ["ImportsFrom","ImportsFrom2","ImportsFrom3",
+ "Importing","Exporting"],
asn1_test_lib:compile_all(Files, Config, [Rule|Opts]),
42 = 'ImportsFrom':i(),
+ testImporting:main(),
ok.
testMegaco(Config) -> test(Config, fun testMegaco/3).
@@ -839,24 +835,20 @@ testContextSwitchingTypes(Config, Rule, Opts) ->
testTypeValueNotation(Config) -> test(Config, fun testTypeValueNotation/3).
testTypeValueNotation(Config, Rule, Opts) ->
- asn1_test_lib:compile_all(["SeqTypeRefPrim", "ValueTest"], Config,
- [Rule|Opts]),
+ asn1_test_lib:compile("SeqTypeRefPrim", Config, [Rule|Opts]),
testTypeValueNotation:main(Rule, Opts).
+testValueTest(Config) -> test(Config, fun testValueTest/3).
+testValueTest(Config, Rule, Opts) ->
+ asn1_test_lib:compile("ValueTest", Config, [Rule|Opts]),
+ testValueTest:main().
+
testOpenTypeImplicitTag(Config) ->
test(Config, fun testOpenTypeImplicitTag/3).
testOpenTypeImplicitTag(Config, Rule, Opts) ->
asn1_test_lib:compile("OpenTypeImplicitTag", Config, [Rule|Opts]),
testOpenTypeImplicitTag:main(Rule).
-duplicate_tags(Config) ->
- DataDir = ?config(data_dir, Config),
- CaseDir = ?config(case_dir, Config),
- {error, [{error, {type, _, _, 'SeqOpt1Imp',
- {asn1, {duplicates_of_the_tags, _}}}}]} =
- asn1ct:compile(filename:join(DataDir, "SeqOptional2"),
- [abs, {outdir, CaseDir}]).
-
rtUI(Config) -> test(Config, fun rtUI/3).
rtUI(Config, Rule, Opts) ->
asn1_test_lib:compile("Prim", Config, [Rule|Opts]),
@@ -990,13 +982,22 @@ testS1AP(Config, Rule, Opts) ->
ok
end.
+testRfcs(Config) -> test(Config, fun testRfcs/3, [{ber,[der]}]).
+testRfcs(Config, Rule, Opts) ->
+ case erlang:system_info(system_architecture) of
+ "sparc-sun-solaris2.10" ->
+ {skip,"Too slow for an old Sparc"};
+ _ ->
+ testRfcs:compile(Config, Rule, Opts),
+ testRfcs:test()
+ end.
+
test_compile_options(Config) ->
ok = test_compile_options:wrong_path(Config),
ok = test_compile_options:path(Config),
ok = test_compile_options:noobj(Config),
ok = test_compile_options:record_name_prefix(Config),
- ok = test_compile_options:verbose(Config),
- ok = test_compile_options:warnings_as_errors(Config).
+ ok = test_compile_options:verbose(Config).
testDoubleEllipses(Config) -> test(Config, fun testDoubleEllipses/3).
testDoubleEllipses(Config, Rule, Opts) ->
@@ -1084,6 +1085,7 @@ test_modules() ->
"CommonDataTypes",
"Constraints",
"ContextSwitchingTypes",
+ "CoverParser",
"DS-EquipmentUser-CommonFunctionOrig-TransmissionPath",
"Enum",
"From",
@@ -1118,7 +1120,9 @@ test_modules() ->
"Def",
"Opt",
"ELDAPv3",
- "LDAP"].
+ "LDAP",
+ "SeqOptional2",
+ "CCSNARG3"].
test_OTP_9688(Config) ->
PrivDir = ?config(case_dir, Config),
diff --git a/lib/asn1/test/asn1_SUITE_data/BadTypeEnding.asn b/lib/asn1/test/asn1_SUITE_data/BadTypeEnding.asn
deleted file mode 100644
index 3ccd838ac0..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/BadTypeEnding.asn
+++ /dev/null
@@ -1,6 +0,0 @@
-BadTypeEnding DEFINITIONS ::=
-BEGIN
-
-T ::= Typ;
-
-END
diff --git a/lib/asn1/test/asn1_SUITE_data/BadValueAssignment1.asn1 b/lib/asn1/test/asn1_SUITE_data/BadValueAssignment1.asn1
deleted file mode 100644
index a5d4984e60..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/BadValueAssignment1.asn1
+++ /dev/null
@@ -1,8 +0,0 @@
-BadValueAssignment1 DEFINITIONS ::=
-BEGIN
-
-int INTEGER ::= 3
-
-int2 integer ::= 3
-
-END
diff --git a/lib/asn1/test/asn1_SUITE_data/BadValueAssignment2.asn1 b/lib/asn1/test/asn1_SUITE_data/BadValueAssignment2.asn1
deleted file mode 100644
index 7a96406001..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/BadValueAssignment2.asn1
+++ /dev/null
@@ -1,8 +0,0 @@
-BadValueAssignment2 DEFINITIONS ::=
-BEGIN
-
-int INTEGER ::= 3
-
-int2 ::= 3
-
-END
diff --git a/lib/asn1/test/asn1_SUITE_data/BadValueSet.asn1 b/lib/asn1/test/asn1_SUITE_data/BadValueSet.asn1
deleted file mode 100644
index 68bd4380b7..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/BadValueSet.asn1
+++ /dev/null
@@ -1,9 +0,0 @@
-BadValueSet DEFINITIONS ::=
-BEGIN
-
-Int INTEGER ::= {1|2|3}
-
-Int2 INTEGER ::= {
- 1,2,3}
-
-END
diff --git a/lib/asn1/test/asn1_SUITE_data/CCSNARG3.asn b/lib/asn1/test/asn1_SUITE_data/CCSNARG3.asn
index 23c1f32ceb..8932238adc 100644
--- a/lib/asn1/test/asn1_SUITE_data/CCSNARG3.asn
+++ b/lib/asn1/test/asn1_SUITE_data/CCSNARG3.asn
@@ -3,7 +3,7 @@ BEGIN
CallCentreServiceNotificationArg ::= SEQUENCE {
scriptInformation [0] ScriptToScriptInformation,
- eventInformation [1] IMPLICIT EventInformation OPTIONAL
+ eventInformation [1] EventInformation OPTIONAL
}
diff --git a/lib/asn1/test/asn1_SUITE_data/ChoExtension.asn1 b/lib/asn1/test/asn1_SUITE_data/ChoExtension.asn1
index f6fe18be10..18473bae30 100644
--- a/lib/asn1/test/asn1_SUITE_data/ChoExtension.asn1
+++ b/lib/asn1/test/asn1_SUITE_data/ChoExtension.asn1
@@ -41,10 +41,4 @@ ChoExt4 ::= CHOICE
str OCTET STRING
}
-ChoEmptyRoot ::= CHOICE {
- ...,
- bool BOOLEAN,
- int INTEGER (0..7)
-}
-
END
diff --git a/lib/asn1/test/asn1_SUITE_data/ChoiceBadExtension.asn1 b/lib/asn1/test/asn1_SUITE_data/ChoiceBadExtension.asn1
deleted file mode 100644
index d0789d7414..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/ChoiceBadExtension.asn1
+++ /dev/null
@@ -1,27 +0,0 @@
-ChoiceBadExtension DEFINITIONS ::=
-BEGIN
-
-Seq ::= SEQUENCE {
- ...,
- name PrintableString,
- location INTEGER {home(0),field(1),roving(2)},
- age INTEGER
- }
-
-Cho1 ::= CHOICE {
- name PrintableString,
- ...,
- location INTEGER {home(0),field(1),roving(2)},
- age INTEGER
- }
-
-Cho2 ::= CHOICE {
- ...,
- name PrintableString,
- location INTEGER {home(0),field(1),roving(2)},
- age INTEGER
- }
-
-END
-
-
diff --git a/lib/asn1/test/asn1_SUITE_data/ConstraintEquivalence.asn1 b/lib/asn1/test/asn1_SUITE_data/ConstraintEquivalence.asn1
index 8b3d151502..648275dd66 100644
--- a/lib/asn1/test/asn1_SUITE_data/ConstraintEquivalence.asn1
+++ b/lib/asn1/test/asn1_SUITE_data/ConstraintEquivalence.asn1
@@ -11,6 +11,10 @@ BEGIN
SingleValueX8 ::= INTEGER (integer42)
SingleValueX9 ::= INTEGER (integer42..integer42)
SingleValueX10 ::= INTEGER ((integer42) INTERSECTION (40..49))
+ SingleValueX11 ::= INTEGER (40..49) (integer42)
+ SingleValueX12 ::= INTEGER ((MIN..0) ^ (1..10) | integer42)
+ SingleValueX13 ::= INTEGER ((11..20) ^ (1..10) | integer42)
+ SingleValueX14 ::= INTEGER ((MIN..42) ^ (1..100) ^ (42..50))
UnconstrainedX0 ::= INTEGER
UnconstrainedX1 ::= INTEGER (MIN..MAX)
@@ -19,6 +23,10 @@ BEGIN
UnconstrainedX4 ::= INTEGER ((MIN..MAX)|9|10)
UnconstrainedX5 ::= INTEGER ((MIN..MAX)|10..20)
UnconstrainedX6 ::= INTEGER ((MIN..MAX) UNION (10..20))
+ UnconstrainedX7 ::= INTEGER ((MIN..MAX) ^ ((MIN..MAX) UNION (10..20)))
+ UnconstrainedX8 ::= INTEGER ((-100..MAX) ^ (42..MAX) | (MIN..41))
+ UnconstrainedX9 ::= INTEGER (UnconstrainedX0)
+ UnconstrainedX10 ::= INTEGER (UnconstrainedX0)(MIN..MAX)
RangeX00 ::= INTEGER (5..10)
RangeX01 ::= INTEGER (4<..<11)
@@ -38,22 +46,66 @@ BEGIN
RangeX16 ::= INTEGER ((5|6) UNION (7) UNION (7<..<11))
RangeX20 ::= INTEGER (0..20) (5..10)
- RangeX21 ::= INTEGER (0..10) (5..20)
- RangeX22 ::= INTEGER (0..10) (5..20) (MIN..MAX)
- RangeX23 ::= INTEGER ((0..10) INTERSECTION (5..20) ^ (MIN..MAX))
- RangeX24 ::= INTEGER ((5|6|7|8|9|10) INTERSECTION (5..20) ^ (MIN..MAX))
+ RangeX21 ::= INTEGER ((0..10) ^ (5..20))
+ RangeX22 ::= INTEGER ((0..10) ^ (5..20) ^ (MIN..MAX))
+ RangeX23 ::= INTEGER (MIN..MAX) (-100..20) (5..10)
+ RangeX24 ::= INTEGER (MIN..MAX) (0..100) (5..20) (5..10)
+ RangeX25 ::= INTEGER ((0..10) INTERSECTION (5..20) ^ (MIN..MAX))
+ RangeX26 ::= INTEGER ((5|6|7|8|9|10) INTERSECTION (5..20) ^ (MIN..MAX))
+
+ RangeX30 ::= INTEGER (((5|6) | (5..20)) ^ (0..10))
+ RangeX31 ::= INTEGER (((((5|6) | (5..20)) ^ (0..10))) ^ (MIN..MAX))
+ RangeX32 ::= INTEGER ((5|7) | (5..10))
+
+ Semi00 ::= INTEGER (0..MAX)
+ Semi01 ::= INTEGER (0..MAX) (MIN..MAX)
+ Semi02 ::= INTEGER ((0..100) UNION (200..MAX) UNION (50..1024))
+
+ RangeExtX00 ::= INTEGER (5..10, ...)
+ RangeExtX01 ::= INTEGER (0..20) (5..10, ...)
+ RangeExtX02 ::= INTEGER (RangeX26) (5..10, ...)
+-- RangeExtX03 ::= RangeX26 (5..10, ...)
+
+ MinRangeX00 ::= INTEGER (MIN..10)
+ MinRangeX01 ::= INTEGER ((MIN..0) | (0..10))
+ MinRangeX02 ::= INTEGER (MIN..MAX) (MIN..100) (MIN..10)
+ MinRangeX03 ::= INTEGER (((MIN..-100)|(-60..-50)) | (MIN..10))
+
+ DisjointRangeX00 ::= INTEGER (0..5 UNION 95..99)
+ DisjointRangeX01 ::= INTEGER (0|1|2|3|4|5|95|96|97|98|99)
+ DisjointRangeX02 ::= INTEGER (0..100) (0..2 UNION 95..99 UNION 3|4|5)
+ DisjointRangeX03 ::= INTEGER (MIN..MAX) (0..2 UNION 95..99 UNION 3|4|5)
+
+ MinDisjointRangeX00 ::= INTEGER (MIN..-100 UNION 100..1000)
+ MinDisjointRangeX01 ::= INTEGER (MIN..-100 UNION 100..1000 UNION (MIN..-100))
+ MinDisjointRangeX02 ::= INTEGER (MIN..-50000 UNION 100..1000 UNION (MIN..-100))
+ MinDisjointRangeX03 ::= INTEGER (MIN..-100 UNION 100..1000 UNION (MIN..-1000000))
+ MinDisjointRangeX04 ::= INTEGER (MIN..-100 UNION 100..1000 UNION (MIN..-1000000))
+ MinDisjointRangeX05 ::= INTEGER (MIN..-100 ^ (MIN..-100) UNION 100..1000)
+ MinDisjointRangeX06 ::= INTEGER (MIN..-100 ^ (MIN..0) UNION 100..1000)
UnconstrainedStringX00 ::= IA5String
UnconstrainedStringX01 ::= IA5String (SIZE (0..MAX))
+ UnconstrainedStringX02 ::= IA5String (SIZE (0..42|43..MAX))
ConstrainedStringX00 ::= IA5String (SIZE (0..5))
ConstrainedStringX01 ::= IA5String (SIZE (0|1|2|3|4|5))
+ StringExtFromX00 ::= IA5String (FROM ("AB", ..., "CD"))(SIZE (1..10, ..., 15..20))
+ StringExtFromX01 ::= IA5String (FROM ("AB", ..., "CD"))(SIZE (1..10, ..., 15..20))
+ StringExtFromX02 ::= IA5String ((FROM ("AB", ..., "CD")) ^ ((SIZE (1..10, ..., 15..20))))
+ StringExtFromX03 ::= IA5String ((FROM ("AB", ..., "CD")) ^ (SIZE (1..10, ..., 15..20)))
+ StringExtFromX04 ::= IA5String (StringExtFromX00)
+
-- Note: None of the back-ends care about the exact values
-- outside of the root range.
ExtConstrainedStringX00 ::= IA5String (SIZE (1..2, ...))
ExtConstrainedStringX01 ::= IA5String (SIZE (1|2, ..., 3))
ExtConstrainedStringX02 ::= IA5String (SIZE (1|2, ..., 3|4|5))
+ ExtConstrainedStringX03 ::= IA5String (SIZE (1|2, ..., 1|2|3|4|5))
+ ExtConstrainedStringX04 ::= IA5String (SIZE (1|2), ..., SIZE (1|2|3|4|5))
+ ExtConstrainedStringX05 ::= IA5String (SIZE (1|2, ...), ...,
+ SIZE (1|2|3|4|5, ...))
integer4 INTEGER ::= 4
integer11 INTEGER ::= 11
diff --git a/lib/asn1/test/asn1_SUITE_data/Constraints.py b/lib/asn1/test/asn1_SUITE_data/Constraints.py
index 3495cd841b..a40c513141 100644
--- a/lib/asn1/test/asn1_SUITE_data/Constraints.py
+++ b/lib/asn1/test/asn1_SUITE_data/Constraints.py
@@ -81,7 +81,7 @@ maxNrOfCellPortionsPerCell-1 INTEGER ::= 35
CellPortionID ::= INTEGER (0..maxNrOfCellPortionsPerCell-1,...)
-- OTP-6763
-T ::= IA5String (SIZE (1|2, ..., SIZE (1|2|3))) -- Dubuisson 268
+T ::= IA5String (SIZE (1|2), ..., SIZE (1|2|3)) -- Dubuisson 268
T2 ::= IA5String (SIZE (1|2, ..., 3)) -- equal with T
-- OTP-8046
@@ -144,5 +144,47 @@ NonOverlapping ::= INTEGER (7280..7560 |
23000..24000 |
24960..26900)
+--
+-- Test INTEGER constraints from fields in objects.
+--
+
+INT-HOLDER ::= CLASS {
+ &id INTEGER UNIQUE,
+ &obj INT-HOLDER OPTIONAL
+} WITH SYNTAX {
+ ID &id
+ [OBJ &obj]
+}
+
+int-holder-1 INT-HOLDER ::= { ID 2 }
+int-holder-2 INT-HOLDER ::= { ID 4 OBJ int-holder-1 }
+
+IntObjectConstr ::= INTEGER (int-holder-2.&obj.&id..int-holder-2.&id)
+
+--
+-- INTEGER constraints defined using named INTEGERs.
+--
+
+ConstrainedNamedInt ::= INTEGER {v1(42)} (v1)
+constrainedNamedInt-1 INTEGER {v1(42)} (v1) ::= 42
+constrainedNamedInt-2 ConstrainedNamedInt ::= 100
+
+SeqWithNamedInt ::= SEQUENCE {
+ int INTEGER {v2(7)} (v2)
+}
+
+--
+-- Cover simpletable constraint checking code.
+--
+
+ContentInfo ::= SEQUENCE {
+ contentType ContentType
+}
+
+Contents TYPE-IDENTIFIER ::= {
+ {OCTET STRING IDENTIFIED BY {2 1 1 1 1 1 1}}
+}
+
+ContentType ::= TYPE-IDENTIFIER.&id({Contents})
END
diff --git a/lib/asn1/test/asn1_SUITE_data/CoverParser.asn1 b/lib/asn1/test/asn1_SUITE_data/CoverParser.asn1
new file mode 100644
index 0000000000..75d40188ca
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/CoverParser.asn1
@@ -0,0 +1,57 @@
+CoverParser DEFINITIONS AUTOMATIC TAGS ::=
+BEGIN
+
+ Cho1 ::= CHOICE {
+ i INTEGER,
+ ... ! 42,
+ [[ b BOOLEAN ]]
+ }
+
+ Cho2 ::= CHOICE {
+ i INTEGER,
+ ...,
+ [[ b BOOLEAN,
+ s IA5String ]],
+ ...
+ }
+
+ Int1 ::= INTEGER (CONSTRAINED BY {INTEGER:1,INTEGER:2})
+
+ Seq1 ::= SEQUENCE {
+ ... ! INTEGER:1
+ }
+
+ Seq2 ::= SEQUENCE {
+ ... ! INTEGER:1,
+ i INTEGER
+ }
+
+ Seq3 ::= SEQUENCE {
+ b BOOLEAN,
+ ... ! INTEGER:1,
+ i INTEGER
+ }
+
+ Seq4 ::= SEQUENCE {
+ a INTEGER OPTIONAL,
+ b OCTET STRING OPTIONAL
+ } (WITH COMPONENTS {a ABSENT, b OPTIONAL} |
+ WITH COMPONENTS {a PRESENT, b PRESENT})
+
+ SeqOf1 ::= SEQUENCE OF INTEGER
+ SeqOf2 ::= SeqOf1 (WITH COMPONENT (0..7))
+
+ SeqOf3 ::= SEQUENCE (SIZE (1..10)) OF id INTEGER
+
+ Set1 ::= SET {
+ ... ! INTEGER:1
+ }
+
+ Set2 ::= SET {
+ ... ! INTEGER:1,
+ a INTEGER
+ }
+
+ SetOf3 ::= SET (SIZE (1..10)) OF id INTEGER
+
+END
diff --git a/lib/asn1/test/asn1_SUITE_data/DoubleEllipses.asn b/lib/asn1/test/asn1_SUITE_data/DoubleEllipses.asn
index e90cf55d61..a96425cbea 100644
--- a/lib/asn1/test/asn1_SUITE_data/DoubleEllipses.asn
+++ b/lib/asn1/test/asn1_SUITE_data/DoubleEllipses.asn
@@ -12,6 +12,15 @@ Seq ::= SEQUENCE
c BOOLEAN
}
+SeqV1 ::= SEQUENCE
+ {
+ a INTEGER,
+ ...,
+ b BOOLEAN,
+ ...
+ }
+
+
SeqV2 ::= SEQUENCE
{
a INTEGER,
@@ -57,6 +66,14 @@ Set ::= SET {
c BOOLEAN
}
+
+SetV1 ::= SET {
+ a INTEGER,
+ ...,
+ b BOOLEAN,
+ ...
+ }
+
SetV2 ::= SET
{
a INTEGER,
@@ -96,4 +113,4 @@ SetAltV2 ::= SET
}
-END
\ No newline at end of file
+END
diff --git a/lib/asn1/test/asn1_SUITE_data/EnumExt.asn1 b/lib/asn1/test/asn1_SUITE_data/EnumExt.asn1
index 74fa97e7aa..55ad5a01a1 100644
--- a/lib/asn1/test/asn1_SUITE_data/EnumExt.asn1
+++ b/lib/asn1/test/asn1_SUITE_data/EnumExt.asn1
@@ -53,5 +53,7 @@ SeqBig ::= SEQUENCE {
i INTEGER
}
+EnumSkip ::= ENUMERATED {a(2), ..., b, c, d, e, f}
+
END
diff --git a/lib/asn1/test/asn1_SUITE_data/Example.asn1 b/lib/asn1/test/asn1_SUITE_data/Example.asn1
deleted file mode 100644
index 2639f63940..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/Example.asn1
+++ /dev/null
@@ -1,20 +0,0 @@
-Example DEFINITIONS ::=
-BEGIN
-
-T ::= Typ
-
-Typ ::= SEQUENCE {
- a b,
- c Typ}
---ECLASS ::= CLASS {
--- &num INTEGER UNIQUE,
--- &Typo
--- } WITH SYNTAX {
--- &Typo DETERMINED BY &num
--- }
-
---v1 ECLASS ::= {INTEGER DETERMINED BY 12}
-
---v2 INTEGER ::= 13
-
-END
diff --git a/lib/asn1/test/asn1_SUITE_data/Export1.asn b/lib/asn1/test/asn1_SUITE_data/Export1.asn
deleted file mode 100644
index 78ead8f4d2..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/Export1.asn
+++ /dev/null
@@ -1,7 +0,0 @@
-Export1 DEFINITIONS ::=
-BEGIN
-EXPORTS T
-
-T ::= Typ
-
-END
diff --git a/lib/asn1/test/asn1_SUITE_data/Exporting.asn1 b/lib/asn1/test/asn1_SUITE_data/Exporting.asn1
new file mode 100644
index 0000000000..e4f32f6788
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/Exporting.asn1
@@ -0,0 +1,18 @@
+Exporting DEFINITIONS AUTOMATIC TAGS ::=
+BEGIN
+
+ Seq ::= SEQUENCE { id INTEGER, f BOOLEAN }
+ PtSeq{T} ::= SEQUENCE { a T }
+
+ CL ::= CLASS {
+ &id INTEGER UNIQUE,
+ &Type
+ } WITH SYNTAX {
+ ID &id TYPE &Type
+ }
+
+ obj CL ::= { ID 1 TYPE OCTET STRING }
+
+ pt-object{CL:ob} CL ::= {ID ob.&id TYPE OCTET STRING}
+
+END
diff --git a/lib/asn1/test/asn1_SUITE_data/ExtensibilityImplied.asn1 b/lib/asn1/test/asn1_SUITE_data/ExtensibilityImplied.asn1
new file mode 100644
index 0000000000..d59b0edda5
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/ExtensibilityImplied.asn1
@@ -0,0 +1,30 @@
+ExtensibilityImplied DEFINITIONS
+AUTOMATIC TAGS
+EXTENSIBILITY IMPLIED
+::=
+BEGIN
+
+Enum1 ::= ENUMERATED { root, ..., ext }
+Enum2 ::= ENUMERATED { root }
+
+Seq1 ::= SEQUENCE {
+ b BOOLEAN,
+ ...,
+ i INTEGER
+}
+
+Seq2 ::= SEQUENCE {
+ b BOOLEAN
+}
+
+Set1 ::= SET {
+ b BOOLEAN,
+ ...,
+ i INTEGER
+}
+
+Set2 ::= SET {
+ b BOOLEAN
+}
+
+END
diff --git a/lib/asn1/test/asn1_SUITE_data/IllegalExport.asn1 b/lib/asn1/test/asn1_SUITE_data/IllegalExport.asn1
deleted file mode 100644
index 1b5e42ad3c..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/IllegalExport.asn1
+++ /dev/null
@@ -1,7 +0,0 @@
-IllegalExport DEFINITIONS ::=
-BEGIN
-EXPORTS T, KalleAnka;
-
-T ::= INTEGER
-
-END
diff --git a/lib/asn1/test/asn1_SUITE_data/Importing.asn1 b/lib/asn1/test/asn1_SUITE_data/Importing.asn1
new file mode 100644
index 0000000000..2f2699c576
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/Importing.asn1
@@ -0,0 +1,20 @@
+Importing DEFINITIONS AUTOMATIC TAGS ::=
+BEGIN
+
+ Seq ::= Exporting.PtSeq{ INTEGER(0..7) }
+ OtherSeq ::= Exporting.Seq
+
+ seq Exporting.Seq ::= { id 42, f TRUE }
+
+ o1 Exporting.CL ::= { ID 2 TYPE INTEGER (0..63) }
+
+ ObjSet Exporting.CL ::= { o1 | Exporting.obj }
+
+ ObjSeq ::= SEQUENCE {
+ id Exporting.CL.&id ({ObjSet}),
+ type Exporting.CL.&Type ({ObjSet}{@id})
+ }
+
+ o1-cloned Exporting.CL ::= Exporting.pt-object{o1}
+
+END
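The testImport/3 change earlier in asn1_SUITE.erl compiles Exporting and Importing together via asn1_test_lib:compile_all/3. A hedged sketch of doing the same by hand with the documented asn1ct API (directory names and the choice of the ber back-end are illustrative, and Exporting is compiled first on the assumption that its .asn1db must be available when the Exporting.* references in Importing are resolved):

%% Assumes the output directory already exists and the .asn1 files
%% are in the current directory.
OutDir = "asn1_gen",
ok = asn1ct:compile("Exporting.asn1", [ber, {outdir,OutDir}]),
ok = asn1ct:compile("Importing.asn1", [ber, {i,OutDir}, {outdir,OutDir}]).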
diff --git a/lib/asn1/test/asn1_SUITE_data/InfObj.asn b/lib/asn1/test/asn1_SUITE_data/InfObj.asn
index 719119f418..3b88770d78 100644
--- a/lib/asn1/test/asn1_SUITE_data/InfObj.asn
+++ b/lib/asn1/test/asn1_SUITE_data/InfObj.asn
@@ -206,7 +206,9 @@ ConstructedDefaultSet CONSTRUCTED-DEFAULT ::= {
{ &id 4, &Type SET { a INTEGER, b BIT STRING } } |
{ &id 5, &Type CHOICE { i INTEGER, b BIT STRING } } |
{ &id 6, &Type SEQUENCE OF INTEGER (1..16) } |
- { &id 7, &Type SET OF INTEGER (1..64) }
+ { &id 7, &Type SET OF INTEGER (1..64) } |
+ { &id 8, &Type SEQUENCE OF SEQUENCE { x INTEGER, y INTEGER } } |
+ { &id 9, &Type SET OF SEQUENCE { x INTEGER, y INTEGER } }
}
ConstructedPdu ::= SEQUENCE {
@@ -288,18 +290,196 @@ OstSeq1234 ::= ObjectSetTest{ {Ost1234} }
OstSeq45 ::= ObjectSetTest{ {Ost45} }
OstSeq12345 ::= ObjectSetTest{ {Ost12345} }
+OstSeq12Except ::= ObjectSetTest{ {Ost123 EXCEPT ost3} }
+OstSeq123Except ::= ObjectSetTest{ {Ost12345 EXCEPT Ost45} }
+
+ExOst1 OBJECT-SET-TEST ::= { ost1, ... }
ExOst12 OBJECT-SET-TEST ::= { ost1, ..., ost2 }
ExOst123 OBJECT-SET-TEST ::= { ost3, ..., ExOst12 }
---ExOst1234 OBJECT-SET-TEST ::= { ExOst123, ..., ost4 }
+ExOst1234 OBJECT-SET-TEST ::= { ExOst123, ..., ost4 }
ExOst45 OBJECT-SET-TEST ::= { ost4, ..., ost5 }
ExOst12345 OBJECT-SET-TEST ::= { ExOst123, ..., ExOst45 }
+ExOstSeq1 ::= ObjectSetTest{ {ExOst1} }
ExOstSeq12 ::= ObjectSetTest{ {ExOst12} }
ExOstSeq123 ::= ObjectSetTest{ {ExOst123} }
---ExOstSeq1234 ::= ObjectSetTest{ {ExOst1234} }
+ExOstSeq1234 ::= ObjectSetTest{ {ExOst1234} }
ExOstSeq45 ::= ObjectSetTest{ {ExOst45} }
ExOstSeq12345 ::= ObjectSetTest{ {ExOst12345} }
-END
+ExOstSeq12Except ::= ObjectSetTest{ {ExOst123 EXCEPT ost3} }
+ExOstSeq123Except ::= ObjectSetTest{ {ExOst12345 EXCEPT ExOst45} }
+
+ExInlOst1 OBJECT-SET-TEST ::= {
+ { 1 IS BIT STRING },
+ ...
+}
+ExInlOst12 OBJECT-SET-TEST ::= {
+ { 1 IS BIT STRING },
+ ...,
+ { 2 IS OCTET STRING }
+}
+
+ExInlOstSeq1 ::= ObjectSetTest{ {ExInlOst1} }
+ExInlOstSeq12 ::= ObjectSetTest{ {ExInlOst12} }
+
+--
+-- Test that extensions in a simple class works.
+--
+
+ExtClassSeq ::= SEQUENCE {
+ arg EXT-CLASS.&id({Extend})
+}
+
+EXT-CLASS ::= CLASS {
+ &id INTEGER UNIQUE
+} WITH SYNTAX {
+ ID &id
+}
+
+Extend EXT-CLASS ::= { { ID alt1 } | { ID alt2 }, ... }
+
+alt1 INTEGER ::= 4
+alt2 INTEGER ::= 5
+
+
+--
+-- Test a BIT STRING which is optional in the simplified syntax.
+--
+
+PUBLIC-KEY ::= CLASS {
+ &id INTEGER UNIQUE,
+ &keyUsage KeyUsage OPTIONAL
+} WITH SYNTAX {
+ IDENTIFIER &id
+ [OPTIONAL-BIT-STRING &keyUsage]
+}
+
+KeyUsage ::= BIT STRING {
+ digitalSignature (0),
+ nonRepudiation (1),
+ keyEncipherment (2)
+ }
+
+object-with-optional-bit-string PUBLIC-KEY ::= {
+ IDENTIFIER 42
+ OPTIONAL-BIT-STRING {digitalSignature, nonRepudiation, keyEncipherment}
+}
+
+-- Test object identifiers from objects.
+
+CONTAINER ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &rid RELATIVE-OID OPTIONAL,
+ &Type OPTIONAL
+} WITH SYNTAX {
+ IDENTIFIED BY &id
+ [REL-OID &rid]
+ [TYPE &Type]
+}
+
+id1 OBJECT IDENTIFIER ::= {1 2 42}
+obj1 CONTAINER ::= { IDENTIFIED BY id1 REL-OID {100 101} }
+
+value-2 OBJECT IDENTIFIER ::= { value-1 25 }
+value-1 OBJECT IDENTIFIER ::= obj1.&id
+value-3 RELATIVE-OID ::= obj1.&rid
+value-4 OBJECT IDENTIFIER ::= { 1 2 value-3 }
+
+
+-- Test an obscure issue when ATTRIBUTE.&id was not
+-- properly evaluated.
+
+Rdn ::= SingleAttribute { {SupportedAttributes} }
+
+ATTRIBUTE ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &Type OPTIONAL
+}
+
+SingleAttribute{ATTRIBUTE:AttrSet} ::= SEQUENCE {
+ type ATTRIBUTE.&id({AttrSet}),
+ value ATTRIBUTE.&Type({AttrSet}{@type})
+}
+AttributeType ::= ATTRIBUTE.&id
+SupportedAttributes ATTRIBUTE ::= { at-name }
+
+id-at OBJECT IDENTIFIER ::= { 2 5 4 41 }
+id-at-name AttributeType ::= id-at
+at-name ATTRIBUTE ::= { &Type PrintableString, &id id-at-name }
+
+--
+-- Test using an alias for TYPE-IDENTIFIER.
+--
+
+TiAliasParameterized { TI-ALIAS:InfoObjectSet } ::= SEQUENCE {
+ algorithm TI-ALIAS.&id({InfoObjectSet}),
+ parameters TI-ALIAS.&Type({InfoObjectSet} {@algorithm}) OPTIONAL
+}
+
+TI-ALIAS ::= TYPE-IDENTIFIER
+
+TiAliasSeq ::= SEQUENCE {
+ prf TiAliasParameterized {{TiAliasSet}}
+}
+
+TiAliasSet TI-ALIAS ::= {
+ {NULL IDENTIFIED BY {2 1 2}},
+ ...
+}
+
+--
+-- Test using an alias for a class.
+--
+
+ALIAS-CONTAINER ::= CLASS {
+ &id INTEGER UNIQUE,
+ &obj INDIRECT-CLASS
+}
+
+INDIRECTED-CLASS ::= CLASS {
+ &id INTEGER UNIQUE,
+ &Type
+}
+
+INDIRECT-CLASS ::= INDIRECTED-CLASS
+
+--
+-- Indirect ObjectClassFieldType in a SEQUENCE.
+--
+
+ContentInfo ::= SEQUENCE {
+ contentType ContentType, -- Indirect ObjectClassFieldType
+ content TYPE-IDENTIFIER.&Type({Contents}{@contentType})
+OPTIONAL
+}
+
+Contents TYPE-IDENTIFIER ::= {
+ {IA5String IDENTIFIED BY id-content-type}
+}
+
+ContentType ::= TYPE-IDENTIFIER.&id({Contents})
+id-content-type ContentType ::= { 2 7 8 9 }
+
+--
+-- Tricky parsing of simplified syntax.
+--
+
+TrickyType-1 ::= BIT STRING
+TrickyType-2 ::= OCTET STRING
+
+TRICKY ::= CLASS {
+ &Type1,
+ &Type2
+} WITH SYNTAX {
+ TYPE &Type1 &Type2
+}
+
+tricky-object TRICKY ::= {TYPE TrickyType-1 TrickyType-2}
+
+tricky-bit-string tricky-object.&Type1 ::= '1011'B
+tricky-octet-string tricky-object.&Type1 ::= 'CAFE'H
+
+END
diff --git a/lib/asn1/test/asn1_SUITE_data/InfObjExtract.asn1 b/lib/asn1/test/asn1_SUITE_data/InfObjExtract.asn1
new file mode 100644
index 0000000000..13981b546d
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/InfObjExtract.asn1
@@ -0,0 +1,136 @@
+InfObjExtract DEFINITIONS AUTOMATIC TAGS ::=
+BEGIN
+
+DATA-CLASS ::= CLASS {
+ &id INTEGER UNIQUE,
+ &Type
+} WITH SYNTAX {
+ ID &id
+ TYPE &Type
+}
+
+data-object-1 DATA-CLASS ::= { ID 1 TYPE BOOLEAN }
+data-object-2 DATA-CLASS ::= { ID 2 TYPE OCTET STRING }
+data-object-3 DATA-CLASS ::= { ID 3 TYPE BIT STRING }
+
+ObjSet DATA-CLASS ::= {
+ holder-object-1.&obj |
+ data-object-2 |
+ data-object-3,
+ ...
+}
+
+OBJ-SET DATA-CLASS ::= {
+ holder-object-1.&obj |
+ data-object-2 |
+ data-object-3,
+ ...
+}
+
+SingleElementSet DATA-CLASS ::= {
+ holder-object-1.&obj
+}
+
+holder-object-1 HOLDER-CLASS ::= {
+ OBJ data-object-1
+}
+
+holder-object-2 HOLDER-CLASS ::= {
+ OBJ-SET {data-object-1}
+}
+
+holder-object-3 HOLDER-CLASS ::= {
+ OBJ-SET {holder-object-2.&ObjSet}
+}
+
+-- Note: References to object sets with names in all uppercase/hyphens
+-- may be represented differently compared to object sets with names
+-- that contain lowercase letters. CAVEAT TESTOR.
+
+HOLDER-OBJECTS HOLDER-CLASS ::= { holder-object-2 }
+HolderObjects HOLDER-CLASS ::= { holder-object-3 }
+
+holder-object-4 HOLDER-CLASS ::= {
+ OBJ-SET { HOLDER-OBJECTS.&ObjSet }
+}
+
+holder-object-5 HOLDER-CLASS ::= {
+ OBJ-SET { HolderObjects.&ObjSet }
+}
+
+holder-object-6 HOLDER-CLASS ::= {
+ OBJ-SET { OBJ-SET }
+}
+
+holder-object-7 HOLDER-CLASS ::= {
+ OBJ-SET { ObjSet }
+}
+
+HOLDER-CLASS ::= CLASS {
+ &obj DATA-CLASS OPTIONAL,
+ &ObjSet DATA-CLASS OPTIONAL
+} WITH SYNTAX {
+ [OBJ &obj]
+ [OBJ-SET &ObjSet]
+}
+
+TestSeq{DATA-CLASS:ObjectSet} ::= SEQUENCE {
+ id DATA-CLASS.&id ({ObjectSet}),
+ data DATA-CLASS.&Type ({ObjectSet}{@id})
+}
+
+DataSeq-1 ::= TestSeq{ {ObjSet} }
+DataSeq-2 ::= TestSeq{ {holder-object-3.&ObjSet} }
+
+DataSeq-3 ::= TestSeq{ {holder-object-4.&ObjSet} }
+DataSeq-4 ::= TestSeq{ {holder-object-5.&ObjSet} }
+DataSeq-5 ::= TestSeq{ {holder-object-6.&ObjSet} }
+DataSeq-6 ::= TestSeq{ {holder-object-7.&ObjSet} }
+
+DataSeqSingleSet-1 ::= TestSeq{ {SingleElementSet} }
+DataSeqSingleSet-2 ::= TestSeq{ {holder-object-1.&obj} }
+
+--
+-- Test ObjectSetFromObjects.
+--
+
+OBJ-CLASS ::= CLASS {
+ &id INTEGER UNIQUE,
+ &Data OPTIONAL,
+ &Obj OBJ-CLASS OPTIONAL,
+ &obj OBJ-CLASS OPTIONAL
+}
+
+obj-class-obj-1 OBJ-CLASS ::= { &id 1, &Data BOOLEAN }
+
+obj-class-obj-2 OBJ-CLASS ::= { &id 2, &Data BOOLEAN,
+ &Obj {obj-class-obj-1} }
+
+obj-class-obj-3 OBJ-CLASS ::= { &id 3, &Data BOOLEAN,
+ &obj {&id 99, &Obj {obj-class-obj-1}} }
+
+obj-class-obj-4 OBJ-CLASS ::= { &id 4, &Data BOOLEAN, &obj obj-class-obj-2 }
+
+obj-class-obj-5 OBJ-CLASS ::= { &id 5, &Data BOOLEAN,
+ &Obj {obj-class-obj-4.&obj} }
+
+ObjClassSet OBJ-CLASS ::= { obj-class-obj-3.&obj.&Obj |
+ obj-class-obj-4.&Obj | -- Non-existing field
+ obj-class-obj-5.&Obj
+ }
+
+TestObjClassSeq{OBJ-CLASS:ObjectSet} ::= SEQUENCE {
+ id OBJ-CLASS.&id ({ObjectSet}),
+ data OBJ-CLASS.&Data ({ObjectSet}{@id})
+}
+
+ObjClassSeq-1 ::= TestObjClassSeq{{ObjClassSet}}
+
+--
+-- Test several levels of inlined definitions.
+--
+
+obj-class-obj-6 OBJ-CLASS ::= { &id 6, &Obj {{&id 100, &Data INTEGER}},
+ &Data INTEGER }
+
+END
diff --git a/lib/asn1/test/asn1_SUITE_data/MissingEnd.asn1 b/lib/asn1/test/asn1_SUITE_data/MissingEnd.asn1
deleted file mode 100644
index 66912ef693..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/MissingEnd.asn1
+++ /dev/null
@@ -1,5 +0,0 @@
-MissingEnd DEFINITIONS ::=
-BEGIN
-
-T ::= Typ
-
diff --git a/lib/asn1/test/asn1_SUITE_data/ObjIdValues.asn1 b/lib/asn1/test/asn1_SUITE_data/ObjIdValues.asn1
index 9368e8dceb..9193ed495c 100644
--- a/lib/asn1/test/asn1_SUITE_data/ObjIdValues.asn1
+++ b/lib/asn1/test/asn1_SUITE_data/ObjIdValues.asn1
@@ -50,6 +50,7 @@ itu-t-o OBJECT IDENTIFIER ::= {itu-t recommendation o}
itu-t-p OBJECT IDENTIFIER ::= {itu-t recommendation p}
itu-t-q OBJECT IDENTIFIER ::= {itu-t recommendation q}
itu-t-r OBJECT IDENTIFIER ::= {itu-t recommendation r}
+itu-t-s OBJECT IDENTIFIER ::= {itu-t recommendation s}
itu-t-t OBJECT IDENTIFIER ::= {itu-t recommendation t}
itu-t-u OBJECT IDENTIFIER ::= {itu-t recommendation u}
itu-t-v OBJECT IDENTIFIER ::= {itu-t recommendation v}
diff --git a/lib/asn1/test/asn1_SUITE_data/ParamBasic.asn1 b/lib/asn1/test/asn1_SUITE_data/ParamBasic.asn1
index 68fc782f33..d203b6c816 100644
--- a/lib/asn1/test/asn1_SUITE_data/ParamBasic.asn1
+++ b/lib/asn1/test/asn1_SUITE_data/ParamBasic.asn1
@@ -42,4 +42,37 @@ SIGNATURE-ALGORITHM ::= CLASS {
KEY &id CONTAINING &Type
}
+alg-seq-1 AnAlgorithm ::= { algorithm 1, type 42 }
+alg-seq-2 AnAlgorithm ::= { algorithm 2, type TRUE }
+
+--
+-- Test that indirect classes references are resolved.
+--
+
+AlgorithmIdentifier2 { ALGORITHM-IDENTIFIER:InfoObjectSet } ::= SEQUENCE {
+ algorithm ALGORITHM-IDENTIFIER.&id({InfoObjectSet}),
+ parameters ALGORITHM-IDENTIFIER.&Type({InfoObjectSet} {@algorithm}) OPTIONAL
+}
+
+ALGORITHM-IDENTIFIER ::= TYPE-IDENTIFIER
+
+Seq ::= SEQUENCE {
+ c1 AlgorithmIdentifier2 {{ObjectSet-1}},
+ c2 AlgorithmIdentifier2 {{ObjectSet-2}}
+}
+
+ObjectSet-1 ALGORITHM-IDENTIFIER ::= { {INTEGER IDENTIFIED BY {2 1 1}}, ... }
+ObjectSet-2 ALGORITHM-IDENTIFIER ::= { ... }
+
+-- Test a value that uses the instantiation of a parameterized type inline.
+-- (Adapted from PKCS-5.)
+--
+
+algid-hmacWithSHA1 AlgorithmIdentifier2 {{ObjectSet-3}} ::=
+ {algorithm id-hmacWithSHA1, parameters NULL : NULL}
+
+ObjectSet-3 TYPE-IDENTIFIER ::= { {NULL IDENTIFIED BY id-hmacWithSHA1} }
+
+id-hmacWithSHA1 OBJECT IDENTIFIER ::= {2 9 9 9 7}
+
END
diff --git a/lib/asn1/test/asn1_SUITE_data/Prim.asn1 b/lib/asn1/test/asn1_SUITE_data/Prim.asn1
index cc0e61422a..b4c011fd39 100644
--- a/lib/asn1/test/asn1_SUITE_data/Prim.asn1
+++ b/lib/asn1/test/asn1_SUITE_data/Prim.asn1
@@ -24,6 +24,8 @@ BEGIN
friday(5),saturday(6),sunday(7)}
SingleEnumVal ::= ENUMERATED {true}
SingleEnumValExt ::= ENUMERATED {true, ...}
+ NegEnumVal ::= ENUMERATED {neg(-1), ..., zero(0)}
+ EnumVal128 ::= ENUMERATED {val(128)}
ObjId ::= OBJECT IDENTIFIER
diff --git a/lib/asn1/test/asn1_SUITE_data/SelectionType.asn b/lib/asn1/test/asn1_SUITE_data/SelectionType.asn
index d7bfbf1788..6163f390dd 100644
--- a/lib/asn1/test/asn1_SUITE_data/SelectionType.asn
+++ b/lib/asn1/test/asn1_SUITE_data/SelectionType.asn
@@ -14,7 +14,7 @@ Element ::= CHOICE {bool BOOLEAN,
utf UTF8String,
ro RELATIVE-OID,
nums NumericString,
- symbol PrintableString,
+ symbol PrintableString,
telet TeletexString,
t61 T61String,
video VideotexString,
@@ -23,13 +23,14 @@ Element ::= CHOICE {bool BOOLEAN,
generalizedTime GeneralizedTime,
gs GraphicString,
vs VisibleString,
--- iso64 ISO646String,
generalString GeneralString,
univ UniversalString,
cs CHARACTER STRING,
bmp BMPString}
-MendeleyevTable ::= SEQUENCE OF symbol < Element
+MendeleyevTable ::= SEQUENCE OF symbol < Element
+MendeleyevSet ::= SET OF atomic-no < Element
+
BoolType ::= bool < Element
einsteinium symbol < Element ::= "Es"
@@ -51,7 +52,6 @@ utctimev utctime < Element ::= "9805281429Z"
gTime generalizedTime < Element ::= "19980528142905.1"
gsv gs < Element ::= "graphic"
vsv vs < Element ::= "visible"
---iso64v iso64 < Element ::= "iso"
gStringv generalString < Element ::= "general"
univv univ < Element ::= "Universal"
bmov bmp < Element ::= "bmp"
diff --git a/lib/asn1/test/asn1_SUITE_data/Seq.py b/lib/asn1/test/asn1_SUITE_data/Seq.py
index f345373ab5..b68f9045a6 100644
--- a/lib/asn1/test/asn1_SUITE_data/Seq.py
+++ b/lib/asn1/test/asn1_SUITE_data/Seq.py
@@ -142,7 +142,10 @@ SeqImp3 ::= SET
set Set1
}
-
+SeqCompOf ::= SEQUENCE {
+ ...,
+ COMPONENTS OF SeqS3
+}
END
diff --git a/lib/asn1/test/asn1_SUITE_data/SeqOptional2.asn b/lib/asn1/test/asn1_SUITE_data/SeqOptional2.asn
index 7de9134096..bb85c9e418 100644
--- a/lib/asn1/test/asn1_SUITE_data/SeqOptional2.asn
+++ b/lib/asn1/test/asn1_SUITE_data/SeqOptional2.asn
@@ -15,10 +15,10 @@ SeqOpt1Imp ::= SEQUENCE
bool1 [1] BOOLEAN OPTIONAL,
int1 INTEGER,
seq1 [2] SeqIn OPTIONAL,
- seq2 [2] SeqIn OPTIONAL,
+ seq2 [3] SeqIn OPTIONAL,
...,
- int2 [3] SeqIn,
- int3 [3] SeqIn
+ int2 [4] SeqIn,
+ int3 [5] SeqIn
}
SeqOpt1Exp ::= SEQUENCE
diff --git a/lib/asn1/test/asn1_SUITE_data/SequenceBadComma.asn b/lib/asn1/test/asn1_SUITE_data/SequenceBadComma.asn
deleted file mode 100644
index 436815aa9b..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/SequenceBadComma.asn
+++ /dev/null
@@ -1,10 +0,0 @@
-SequenceBadComma DEFINITIONS IMPLICIT TAGS ::=
-BEGIN
-EXPORTS Person;
-
-Person ::= [PRIVATE 19] SEQUENCE {,
- name PrintableString,
- location INTEGER {home(0),field(1),roving(2)},
- age INTEGER OPTIONAL
- }
-END
diff --git a/lib/asn1/test/asn1_SUITE_data/SequenceBadComponentName.asn1 b/lib/asn1/test/asn1_SUITE_data/SequenceBadComponentName.asn1
deleted file mode 100644
index 8b2b8816db..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/SequenceBadComponentName.asn1
+++ /dev/null
@@ -1,10 +0,0 @@
-SequenceBadComponentName DEFINITIONS ::=
-BEGIN
-
-T ::= Typ
-
-Typ ::= SEQUENCE {
- a INTEGER,
- C Typ}
-
-END
diff --git a/lib/asn1/test/asn1_SUITE_data/SequenceBadComponentType.asn1 b/lib/asn1/test/asn1_SUITE_data/SequenceBadComponentType.asn1
deleted file mode 100644
index 0c33f48906..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/SequenceBadComponentType.asn1
+++ /dev/null
@@ -1,10 +0,0 @@
-SequenceBadComponentType DEFINITIONS ::=
-BEGIN
-
-T ::= Typ
-
-Typ ::= SEQUENCE {
- a b,
- c T}
-
-END
diff --git a/lib/asn1/test/asn1_SUITE_data/Syntax.py b/lib/asn1/test/asn1_SUITE_data/Syntax.py
deleted file mode 100644
index 867d1148e1..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/Syntax.py
+++ /dev/null
@@ -1,10 +0,0 @@
-Syntax DEFINITIONS IMPLICIT TAGS ::=
-BEGIN
-EXPORTS Person;
-
-Person ::= [PRIVATE 19] SEQUENCE {,
- name PrintableString,
- location INTEGER {home(0),field(1),roving(2)},
- age INTEGER OPTIONAL
- }
-END
diff --git a/lib/asn1/test/asn1_SUITE_data/ValueTest.asn b/lib/asn1/test/asn1_SUITE_data/ValueTest.asn
index dae9ae498a..b2c59d686a 100644
--- a/lib/asn1/test/asn1_SUITE_data/ValueTest.asn
+++ b/lib/asn1/test/asn1_SUITE_data/ValueTest.asn
@@ -1,4 +1,4 @@
-ValueTest DEFINITIONS ::=
+ValueTest DEFINITIONS AUTOMATIC TAGS ::=
BEGIN
@@ -23,8 +23,15 @@ vENUMERATED RadioButton ::= button1
vBS BSNNL ::= {zero,two}
vNULL NULL ::= NULL
vOS OCTET STRING ::= '313233'H
-vOD OBJECT IDENTIFIER ::= {2 1 1}
+-- OBJECT IDENTIFIER
+vOD OBJECT IDENTIFIER ::= {2 1 1}
+one INTEGER ::= 1
+integer-first OBJECT IDENTIFIER ::= {one 2}
+rel-oid-1 RELATIVE-OID ::= {2 4 5}
+include-roid OBJECT IDENTIFIER ::= {0 rel-oid-1}
+include-oid OBJECT IDENTIFIER ::= {integer-first 1}
+include-all OBJECT IDENTIFIER ::= {integer-first 1 rel-oid-1 42}
--Character strings
numericstring NumericString ::= "01234567"
@@ -41,7 +48,6 @@ objectdescriptor ObjectDescriptor ::= "ObjectDescriptor"
graphicstring GraphicString ::= "GraphicString"
generalstring GeneralString ::= "GeneralString"
bmpstring1 BMPString ::= "BMPString"
---bmpstring2 BMPString ::= [{0,0,0,66},{0,0,0,77},{0,0,0,80},{0,0,0,115},{0,0,0,116},{0,0,0,114},{0,0,0,105},{0,0,0,110},{0,0,0,103}]
latinCapitalLetterA UniversalString ::= {0,0,0,65}
greekCapitalLetterSigma UniversalString ::= {0,0,3,145}
my-universalstring UniversalString ::= {"This is a capital A: ",
@@ -50,4 +56,88 @@ my-universalstring UniversalString ::= {"This is a capital A: ",
greekCapitalLetterSigma,
"; try and spot the difference!"}
+-- Useful parameterized SEQUENCE.
+ParamSeq{Type} ::= SEQUENCE {
+ a Type
+}
+
+-- Integer values.
+IntegerSeq ::= ParamSeq{INTEGER}
+someInteger INTEGER ::= 42
+integerSeq1 IntegerSeq ::= { a otherInteger }
+otherInteger INTEGER ::= someInteger
+
+--
+-- Values from objects.
+--
+int-from-object-1 INTEGER ::= int-holder-2.&obj.&id
+int-from-object-2 INTEGER ::= int-holder-2.&id
+
+INT-HOLDER ::= CLASS {
+ &id INTEGER UNIQUE,
+ &obj INT-HOLDER OPTIONAL
+} WITH SYNTAX {
+ ID &id
+ [OBJ &obj]
+}
+
+int-holder-1 INT-HOLDER ::= { ID 2 }
+int-holder-2 INT-HOLDER ::= { ID 4 OBJ int-holder-1 }
+
+II ::= INTEGER (int-from-object-1..int-from-object-2)
+
+-- Recursive OCTET STRING definitions.
+
+OS-HOLDER ::= CLASS {
+ &id INTEGER UNIQUE,
+ &os OCTET STRING
+} WITH SYNTAX {
+ ID &id OS &os
+}
+
+os-holder-1 OS-HOLDER ::= { ID 1 OS '4041FF'H }
+
+OctetStringSeq ::= ParamSeq{OCTET STRING}
+
+someOctetString OCTET STRING ::= '404142'H
+
+octetStringSeq1 OctetStringSeq ::= { a someOctetString }
+octetStringSeq2 OctetStringSeq ::= { a otherOctetString }
+octetStringSeq3 OctetStringSeq ::= { a os-holder-1.&os }
+
+otherOctetString OCTET STRING ::= someOctetString
+
+os-1 OCTET STRING ::= os-2
+os-2 OCTET STRING ::= os-holder-1.&os
+
+-- Recursive BIT STRING definitions.
+
+BS-HOLDER ::= CLASS {
+ &id INTEGER UNIQUE,
+ &bs BIT STRING,
+ &named-bs NamedBsType
+} WITH SYNTAX {
+ ID &id BS &bs NAMED-BS &named-bs
+}
+bs-holder-1 BS-HOLDER ::= { ID 1 BS '101'B NAMED-BS {a,c} }
+
+NamedBsType ::= BIT STRING {a(0),b(1),c(2)}
+BsSeq ::= SEQUENCE {
+ a BIT STRING,
+ b NamedBsType
+}
+
+someBitString BIT STRING ::= '101101'B
+
+bsSeq1 BsSeq ::= { a someBitString, b someNamedBs }
+bsSeq2 BsSeq ::= { a otherBitString, b someOtherNamedBs }
+bsSeq3 BsSeq ::= { a bs-holder-1.&bs, b bs-holder-1.&named-bs }
+
+otherBitString BIT STRING ::= someBitString
+bsFromObjectInd BIT STRING ::= bsFromObject
+bsFromObject BIT STRING ::= bs-holder-1.&bs
+
+someOtherNamedBs NamedBsType ::= someNamedBs
+someNamedBs NamedBsType ::= {c}
+
END
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/ACSE-1.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/ACSE-1.asn1
index 3f1385323a..3f1385323a 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/ACSE-1.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/ACSE-1.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/AlgorithmInformation-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/AlgorithmInformation-2009.asn1
new file mode 100644
index 0000000000..f912966c72
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/AlgorithmInformation-2009.asn1
@@ -0,0 +1,466 @@
+AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+DEFINITIONS EXPLICIT TAGS ::=
+BEGIN
+EXPORTS ALL;
+IMPORTS
+
+KeyUsage
+FROM PKIX1Implicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1)
+ security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-implicit-02(59)} ;
+
+-- Suggested prefixes for algorithm objects are:
+--
+-- mda- Message Digest Algorithms
+-- sa- Signature Algorithms
+-- kta- Key Transport Algorithms (Asymmetric)
+-- kaa- Key Agreement Algorithms (Asymmetric)
+-- kwa- Key Wrap Algorithms (Symmetric)
+-- kda- Key Derivation Algorithms
+-- maca- Message Authentication Code Algorithms
+-- pk- Public Key
+-- cea- Content (symmetric) Encryption Algorithms
+-- cap- S/MIME Capabilities
+
+ParamOptions ::= ENUMERATED {
+ required, -- Parameters MUST be encoded in structure
+ preferredPresent, -- Parameters SHOULD be encoded in structure
+ preferredAbsent, -- Parameters SHOULD NOT be encoded in structure
+ absent, -- Parameters MUST NOT be encoded in structure
+ inheritable, -- Parameters are inherited if not present
+ optional, -- Parameters MAY be encoded in the structure
+ ...
+}
+
+-- DIGEST-ALGORITHM
+--
+-- Describes the basic information for ASN.1 and a digest
+-- algorithm.
+--
+-- &id - contains the OID identifying the digest algorithm
+-- &Params - if present, contains the type for the algorithm
+-- parameters; if absent, implies no parameters
+-- &paramPresence - parameter presence requirement
+--
+-- Additional information such as the length of the hash could have
+-- been encoded. Without a clear understanding of what information
+-- is needed by applications, such extraneous information was not
+-- considered to be of sufficient importance.
+--
+-- Example:
+-- mda-sha1 DIGEST-ALGORITHM ::= {
+-- IDENTIFIER id-sha1
+-- PARAMS TYPE NULL ARE preferredAbsent
+-- }
+
+DIGEST-ALGORITHM ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &Params OPTIONAL,
+ &paramPresence ParamOptions DEFAULT absent
+} WITH SYNTAX {
+ IDENTIFIER &id
+ [PARAMS [TYPE &Params] ARE &paramPresence ]
+}
+
+-- SIGNATURE-ALGORITHM
+--
+-- Describes the basic properties of a signature algorithm
+--
+-- &id - contains the OID identifying the signature algorithm
+-- &Value - contains a type definition for the value structure of
+-- the signature; if absent, implies that no ASN.1
+-- encoding is performed on the value
+-- &Params - if present, contains the type for the algorithm
+-- parameters; if absent, implies no parameters
+-- &paramPresence - parameter presence requirement
+-- &HashSet - The set of hash algorithms used with this
+-- signature algorithm
+-- &PublicKeySet - the set of public key algorithms for this
+-- signature algorithm
+-- &smimeCaps - contains the object describing how the S/MIME
+-- capabilities are presented.
+--
+-- Example:
+-- sig-RSA-PSS SIGNATURE-ALGORITHM ::= {
+-- IDENTIFIER id-RSASSA-PSS
+-- PARAMS TYPE RSASSA-PSS-params ARE required
+-- HASHES { mda-sha1 | mda-md5, ... }
+-- PUBLIC-KEYS { pk-rsa | pk-rsa-pss }
+-- }
+
+SIGNATURE-ALGORITHM ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &Value OPTIONAL,
+ &Params OPTIONAL,
+ &paramPresence ParamOptions DEFAULT absent,
+ &HashSet DIGEST-ALGORITHM OPTIONAL,
+ &PublicKeySet PUBLIC-KEY OPTIONAL,
+ &smimeCaps SMIME-CAPS OPTIONAL
+} WITH SYNTAX {
+ IDENTIFIER &id
+ [VALUE &Value]
+ [PARAMS [TYPE &Params] ARE &paramPresence ]
+ [HASHES &HashSet]
+ [PUBLIC-KEYS &PublicKeySet]
+ [SMIME-CAPS &smimeCaps]
+}
+
+-- PUBLIC-KEY
+--
+-- Describes the basic properties of a public key
+--
+-- &id - contains the OID identifying the public key
+-- &KeyValue - contains the type for the key value
+-- &Params - if present, contains the type for the algorithm
+-- parameters; if absent, implies no parameters
+-- &paramPresence - parameter presence requirement
+-- &keyUsage - contains the set of bits that are legal for this
+-- key type. Note that it does not make any statement
+-- about how bits may be paired.
+-- &PrivateKey - contains a type structure for encoding the private
+-- key information.
+--
+-- Example:
+-- pk-rsa-pss PUBLIC-KEY ::= {
+-- IDENTIFIER id-RSASSA-PSS
+-- KEY RSAPublicKey
+-- PARAMS TYPE RSASSA-PSS-params ARE optional
+-- CERT-KEY-USAGE { .... }
+-- }
+
+PUBLIC-KEY ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &KeyValue OPTIONAL,
+ &Params OPTIONAL,
+ &paramPresence ParamOptions DEFAULT absent,
+ &keyUsage KeyUsage OPTIONAL,
+ &PrivateKey OPTIONAL
+} WITH SYNTAX {
+ IDENTIFIER &id
+ [KEY &KeyValue]
+ [PARAMS [TYPE &Params] ARE &paramPresence]
+ [CERT-KEY-USAGE &keyUsage]
+ [PRIVATE-KEY &PrivateKey]
+}
+
+-- KEY-TRANSPORT
+--
+-- Describes the basic properties of a key transport algorithm
+--
+-- &id - contains the OID identifying the key transport algorithm
+-- &Params - if present, contains the type for the algorithm
+-- parameters; if absent, implies no parameters
+-- &paramPresence - parameter presence requirement
+-- &PublicKeySet - specifies which public keys are used with
+-- this algorithm
+-- &smimeCaps - contains the object describing how the S/MIME
+-- capabilities are presented.
+--
+-- Example:
+-- kta-rsaTransport KEY-TRANSPORT ::= {
+-- IDENTIFIER &id
+-- PARAMS TYPE NULL ARE required
+-- PUBLIC-KEYS { pk-rsa | pk-rsa-pss }
+-- }
+
+KEY-TRANSPORT ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &Params OPTIONAL,
+ &paramPresence ParamOptions DEFAULT absent,
+ &PublicKeySet PUBLIC-KEY OPTIONAL,
+ &smimeCaps SMIME-CAPS OPTIONAL
+} WITH SYNTAX {
+ IDENTIFIER &id
+ [PARAMS [TYPE &Params] ARE &paramPresence]
+ [PUBLIC-KEYS &PublicKeySet]
+ [SMIME-CAPS &smimeCaps]
+}
+
+-- KEY-AGREE
+--
+-- Describes the basic properties of a key agreement algorithm
+--
+-- &id - contains the OID identifying the key agreement algorithm
+-- &Params - if present, contains the type for the algorithm
+-- parameters; if absent, implies no parameters
+-- &paramPresence - parameter presence requirement
+-- &PublicKeySet - specifies which public keys are used with
+-- this algorithm
+-- &Ukm - type of user keying material used
+-- &ukmPresence - specifies the requirements to define the UKM field
+-- &smimeCaps - contains the object describing how the S/MIME
+-- capabilities are presented.
+--
+-- Example:
+-- kaa-dh-static-ephemeral KEY-AGREE ::= {
+-- IDENTIFIER id-alg-ESDH
+-- PARAMS TYPE KeyWrapAlgorithm ARE required
+-- PUBLIC-KEYS {
+-- {IDENTIFIER dh-public-number KEY DHPublicKey
+-- PARAMS TYPE DHDomainParameters ARE inheritable }
+-- }
+-- - - UKM should be present but is not separately ASN.1-encoded
+-- UKM ARE preferredPresent
+-- }
+
+KEY-AGREE ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &Params OPTIONAL,
+ &paramPresence ParamOptions DEFAULT absent,
+ &PublicKeySet PUBLIC-KEY OPTIONAL,
+ &Ukm OPTIONAL,
+ &ukmPresence ParamOptions DEFAULT absent,
+ &smimeCaps SMIME-CAPS OPTIONAL
+} WITH SYNTAX {
+ IDENTIFIER &id
+ [PARAMS [TYPE &Params] ARE &paramPresence]
+ [PUBLIC-KEYS &PublicKeySet]
+ [UKM [TYPE &Ukm] ARE &ukmPresence]
+ [SMIME-CAPS &smimeCaps]
+}
+
+-- KEY-WRAP
+--
+-- Describes the basic properties of a key wrap algorithm
+--
+-- &id - contains the OID identifying the key wrap algorithm
+-- &Params - if present, contains the type for the algorithm
+-- parameters; if absent, implies no parameters
+-- &paramPresence - parameter presence requirement
+-- &smimeCaps - contains the object describing how the S/MIME
+-- capabilities are presented.
+--
+-- Example:
+-- kwa-cms3DESwrap KEY-WRAP ::= {
+-- IDENTIFIER id-alg-CMS3DESwrap
+-- PARAMS TYPE NULL ARE required
+-- }
+
+KEY-WRAP ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &Params OPTIONAL,
+ &paramPresence ParamOptions DEFAULT absent,
+ &smimeCaps SMIME-CAPS OPTIONAL
+} WITH SYNTAX {
+ IDENTIFIER &id
+ [PARAMS [TYPE &Params] ARE &paramPresence]
+ [SMIME-CAPS &smimeCaps]
+}
+-- KEY-DERIVATION
+--
+-- Describes the basic properties of a key derivation algorithm
+--
+-- &id - contains the OID identifying the key derivation algorithm
+-- &Params - if present, contains the type for the algorithm
+-- parameters; if absent, implies no parameters
+-- &paramPresence - parameter presence requirement
+-- &smimeCaps - contains the object describing how the S/MIME
+-- capabilities are presented.
+--
+-- Example:
+-- kda-pbkdf2 KEY-DERIVATION ::= {
+-- IDENTIFIER id-PBKDF2
+-- PARAMS TYPE PBKDF2-params ARE required
+-- }
+
+KEY-DERIVATION ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &Params OPTIONAL,
+ &paramPresence ParamOptions DEFAULT absent,
+ &smimeCaps SMIME-CAPS OPTIONAL
+} WITH SYNTAX {
+ IDENTIFIER &id
+ [PARAMS [TYPE &Params] ARE &paramPresence]
+ [SMIME-CAPS &smimeCaps]
+}
+
+-- MAC-ALGORITHM
+--
+-- Describes the basic properties of a message
+-- authentication code (MAC) algorithm
+--
+-- &id - contains the OID identifying the MAC algorithm
+-- &Params - if present, contains the type for the algorithm
+-- parameters; if absent, implies no parameters
+-- &paramPresence - parameter presence requirement
+-- &keyed - MAC algorithm is a keyed MAC algorithm
+-- &smimeCaps - contains the object describing how the S/MIME
+-- capabilities are presented.
+--
+-- Some parameters that perhaps should have been added would be
+-- fields with the minimum and maximum MAC lengths for
+-- those MAC algorithms that allow truncations.
+--
+-- Example:
+-- maca-hmac-sha1 MAC-ALGORITHM ::= {
+-- IDENTIFIER hMAC-SHA1
+-- PARAMS TYPE NULL ARE preferredAbsent
+-- IS KEYED MAC TRUE
+-- SMIME-CAPS {IDENTIFIED BY hMAC-SHA1}
+-- }
+
+MAC-ALGORITHM ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &Params OPTIONAL,
+ &paramPresence ParamOptions DEFAULT absent,
+ &keyed BOOLEAN,
+ &smimeCaps SMIME-CAPS OPTIONAL
+} WITH SYNTAX {
+ IDENTIFIER &id
+ [PARAMS [TYPE &Params] ARE &paramPresence]
+ IS-KEYED-MAC &keyed
+ [SMIME-CAPS &smimeCaps]
+}
+
+-- CONTENT-ENCRYPTION
+--
+-- Describes the basic properties of a content encryption
+-- algorithm
+--
+-- &id - contains the OID identifying the content
+-- encryption algorithm
+-- &Params - if present, contains the type for the algorithm
+-- parameters; if absent, implies no parameters
+-- &paramPresence - parameter presence requirement
+-- &smimeCaps - contains the object describing how the S/MIME
+-- capabilities are presented.
+--
+-- Example:
+-- cea-3DES-cbc CONTENT-ENCRYPTION ::= {
+-- IDENTIFIER des-ede3-cbc
+-- PARAMS TYPE IV ARE required
+-- SMIME-CAPS { IDENTIFIED BY des-ede3-cbc }
+-- }
+
+CONTENT-ENCRYPTION ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &Params OPTIONAL,
+ &paramPresence ParamOptions DEFAULT absent,
+ &smimeCaps SMIME-CAPS OPTIONAL
+} WITH SYNTAX {
+ IDENTIFIER &id
+ [PARAMS [TYPE &Params] ARE &paramPresence]
+ [SMIME-CAPS &smimeCaps]
+}
+
+-- ALGORITHM
+--
+-- Describes a generic algorithm identifier
+--
+-- &id - contains the OID identifying the algorithm
+-- &Params - if present, contains the type for the algorithm
+-- parameters; if absent, implies no parameters
+-- &paramPresence - parameter presence requirement
+-- &smimeCaps - contains the object describing how the S/MIME
+-- capabilities are presented.
+--
+-- This would be used for cases where an algorithm of an unknown
+-- type is used. In general however, one should either define
+-- a more complete algorithm structure (such as the one above)
+-- or use the TYPE-IDENTIFIER class.
+
+ALGORITHM ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &Params OPTIONAL,
+ &paramPresence ParamOptions DEFAULT absent,
+ &smimeCaps SMIME-CAPS OPTIONAL
+} WITH SYNTAX {
+ IDENTIFIER &id
+ [PARAMS [TYPE &Params] ARE &paramPresence]
+ [SMIME-CAPS &smimeCaps]
+}
+
+-- AlgorithmIdentifier
+--
+-- Provides the generic structure that is used to encode algorithm
+-- identification and the parameters associated with the
+-- algorithm.
+--
+-- The first parameter represents the type of the algorithm being
+-- used.
+-- The second parameter represents an object set containing the
+-- algorithms that may occur in this situation.
+-- The initial list of required algorithms should occur to the
+-- left of an extension marker; all other algorithms should
+-- occur to the right of an extension marker.
+--
+-- The object class ALGORITHM can be used for generic unspecified
+-- items.
+-- If new ALGORITHM classes are defined, the fields &id and &Params
+-- need to be present as fields in the object in order to use
+-- this parameterized type.
+--
+-- Example:
+-- SignatureAlgorithmIdentifier ::=
+-- AlgorithmIdentifier{SIGNATURE-ALGORITHM, {SignatureAlgSet}}
+
+AlgorithmIdentifier{ALGORITHM-TYPE, ALGORITHM-TYPE:AlgorithmSet} ::=
+ SEQUENCE {
+ algorithm ALGORITHM-TYPE.&id({AlgorithmSet}),
+ parameters ALGORITHM-TYPE.
+ &Params({AlgorithmSet}{@algorithm}) OPTIONAL
+ }
+
+-- S/MIME Capabilities
+--
+-- We have moved the SMIME-CAPS from the module for RFC 3851 to here
+-- because it is used in RFC 4262 (X.509 Certificate Extension for
+-- S/MIME Capabilities)
+--
+--
+-- This class is used to represent an S/MIME capability. S/MIME
+-- capabilities are used to represent what algorithm capabilities
+-- an individual has. The classic example was the content encryption
+-- algorithm RC2 where the algorithm id and the RC2 key lengths
+-- supported needed to be advertised, but the IV used is not fixed.
+-- Thus, for RC2 we used
+--
+-- cap-RC2CBC SMIME-CAPS ::= {
+-- TYPE INTEGER ( 40 | 128 ) IDENTIFIED BY rc2-cbc }
+--
+-- where 40 and 128 represent the RC2 key length in number of bits.
+--
+-- Another example where information needs to be shown is for
+-- RSA-OAEP where only specific hash functions or mask generation
+-- functions are supported, but the saltLength is specified by the
+-- sender and not the recipient. In this case, one can either
+-- generate a number of capability items,
+-- or a new S/MIME capability type could be generated where
+-- multiple hash functions could be specified.
+--
+--
+-- SMIME-CAP
+--
+-- This class is used to associate the type that describes the
+-- capabilities with the object identifier.
+--
+
+SMIME-CAPS ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &Type OPTIONAL
+}
+WITH SYNTAX { [TYPE &Type] IDENTIFIED BY &id }
+
+--
+-- Generic type - this is used for defining values.
+--
+
+-- Define a single S/MIME capability encoding
+
+SMIMECapability{SMIME-CAPS:CapabilitySet} ::= SEQUENCE {
+ capabilityID SMIME-CAPS.&id({CapabilitySet}),
+ parameters SMIME-CAPS.&Type({CapabilitySet}
+ {@capabilityID}) OPTIONAL
+}
+
+-- Define a sequence of S/MIME capability values
+
+SMIMECapabilities { SMIME-CAPS:CapabilitySet } ::=
+ SEQUENCE SIZE (1..MAX) OF SMIMECapability{{CapabilitySet} }
+
+END
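To make the parameterized AlgorithmIdentifier{} and the ALGORITHM class above concrete, here is a minimal sketch of how an importing module would define an algorithm object, collect it into an object set, and instantiate the identifier type. It is illustrative only and not part of the RFC module or of this test fixture; alg-example, ExampleAlgs, ExampleAlgorithmIdentifier, and id-example-alg are hypothetical names.

    -- Illustrative sketch; id-example-alg is assumed to be an OBJECT
    -- IDENTIFIER value defined elsewhere.
    alg-example ALGORITHM ::= {
        IDENTIFIER id-example-alg
        PARAMS TYPE NULL ARE required
    }

    ExampleAlgs ALGORITHM ::= { alg-example, ... }

    -- Instantiating the parameterized type constrains &id to the OIDs in
    -- ExampleAlgs and resolves &Params per object (here: NULL).
    ExampleAlgorithmIdentifier ::=
        AlgorithmIdentifier{ALGORITHM, {ExampleAlgs}}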
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/AttributeCertificateVersion1-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/AttributeCertificateVersion1-2009.asn1
new file mode 100644
index 0000000000..46b431af40
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/AttributeCertificateVersion1-2009.asn1
@@ -0,0 +1,59 @@
+ AttributeCertificateVersion1-2009
+ {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-v1AttrCert-02(49)}
+ DEFINITIONS EXPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ SIGNATURE-ALGORITHM, ALGORITHM, AlgorithmIdentifier{}
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ AttributeSet{}, Extensions{}, EXTENSION, ATTRIBUTE
+ FROM PKIX-CommonTypes-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57) }
+
+ CertificateSerialNumber, UniqueIdentifier, SIGNED{}
+ FROM PKIX1Explicit-2009
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-explicit-02(51) }
+
+ GeneralNames
+ FROM PKIX1Implicit-2009
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-implicit-02(59) }
+
+ AttCertValidityPeriod, IssuerSerial
+ FROM PKIXAttributeCertificate-2009
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-attribute-cert-02(47) } ;
+
+ -- Definition extracted from X.509-1997 [X.509-97], but
+ -- different type names are used to avoid collisions.
+
+ AttributeCertificateV1 ::= SIGNED{AttributeCertificateInfoV1}
+
+ AttributeCertificateInfoV1 ::= SEQUENCE {
+ version AttCertVersionV1 DEFAULT v1,
+ subject CHOICE {
+ baseCertificateID [0] IssuerSerial,
+ -- associated with a Public Key Certificate
+ subjectName [1] GeneralNames },
+ -- associated with a name
+ issuer GeneralNames,
+ signature AlgorithmIdentifier{SIGNATURE-ALGORITHM, {...}},
+ serialNumber CertificateSerialNumber,
+ attCertValidityPeriod AttCertValidityPeriod,
+ attributes SEQUENCE OF AttributeSet{{AttrList}},
+ issuerUniqueID UniqueIdentifier OPTIONAL,
+ extensions Extensions{{AttributeCertExtensionsV1}} OPTIONAL }
+
+ AttCertVersionV1 ::= INTEGER { v1(0) }
+
+ AttrList ATTRIBUTE ::= {...}
+ AttributeCertExtensionsV1 EXTENSION ::= {...}
+
+ END
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/AuthenticationFramework.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/AuthenticationFramework.asn1
index 5cfa9062f0..5cfa9062f0 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/AuthenticationFramework.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/AuthenticationFramework.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/BasicAccessControl.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/BasicAccessControl.asn1
index d8b2b687ae..d8b2b687ae 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/BasicAccessControl.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/BasicAccessControl.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/CertificateExtensions.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/CertificateExtensions.asn1
index 0daf2208e9..0daf2208e9 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/CertificateExtensions.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/CertificateExtensions.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Character-Coding-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Character-Coding-Attributes.asn1
index 04060cf060..04060cf060 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Character-Coding-Attributes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Character-Coding-Attributes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Character-Presentation-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Character-Presentation-Attributes.asn1
index aed48ac26b..aed48ac26b 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Character-Presentation-Attributes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Character-Presentation-Attributes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Character-Profile-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Character-Profile-Attributes.asn1
index 7ba5bf194a..7ba5bf194a 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Character-Profile-Attributes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Character-Profile-Attributes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Colour-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Colour-Attributes.asn1
index 24c7fafc38..24c7fafc38 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Colour-Attributes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Colour-Attributes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/CryptographicMessageSyntax-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/CryptographicMessageSyntax-2009.asn1
new file mode 100644
index 0000000000..3e350294be
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/CryptographicMessageSyntax-2009.asn1
@@ -0,0 +1,463 @@
+ CryptographicMessageSyntax-2009
+ { iso(1) member-body(2) us(840) rsadsi(113549)
+ pkcs(1) pkcs-9(9) smime(16) modules(0) id-mod-cms-2004-02(41) }
+ DEFINITIONS IMPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ ParamOptions, DIGEST-ALGORITHM, SIGNATURE-ALGORITHM,
+ PUBLIC-KEY, KEY-DERIVATION, KEY-WRAP, MAC-ALGORITHM,
+ KEY-AGREE, KEY-TRANSPORT, CONTENT-ENCRYPTION, ALGORITHM,
+ AlgorithmIdentifier
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+ SignatureAlgs, MessageDigestAlgs, KeyAgreementAlgs,
+ MessageAuthAlgs, KeyWrapAlgs, ContentEncryptionAlgs,
+ KeyTransportAlgs, KeyDerivationAlgs, KeyAgreePublicKeys
+ FROM CryptographicMessageSyntaxAlgorithms-2009
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-cmsalg-2001-02(37) }
+
+ Certificate, CertificateList, CertificateSerialNumber,
+ Name, ATTRIBUTE
+ FROM PKIX1Explicit-2009
+ { iso(1) identified-organization(3) dod(6) internet(1)
+ security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-explicit-02(51) }
+
+ AttributeCertificate
+ FROM PKIXAttributeCertificate-2009
+ { iso(1) identified-organization(3) dod(6) internet(1)
+ security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-attribute-cert-02(47) }
+
+ AttributeCertificateV1
+ FROM AttributeCertificateVersion1-2009
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-v1AttrCert-02(49) } ;
+
+ -- Cryptographic Message Syntax
+
+ -- The following are used for version numbers using the ASN.1
+ -- idiom "[[n:"
+ -- Version 1 = PKCS #7
+ -- Version 2 = S/MIME V2
+ -- Version 3 = RFC 2630
+ -- Version 4 = RFC 3369
+ -- Version 5 = RFC 3852
+
+ CONTENT-TYPE ::= TYPE-IDENTIFIER
+ ContentType ::= CONTENT-TYPE.&id
+
+ ContentInfo ::= SEQUENCE {
+ contentType CONTENT-TYPE.
+ &id({ContentSet}),
+ content [0] EXPLICIT CONTENT-TYPE.
+ &Type({ContentSet}{@contentType})}
+
+ ContentSet CONTENT-TYPE ::= {
+ -- Define the set of content types to be recognized.
+ ct-Data | ct-SignedData | ct-EncryptedData | ct-EnvelopedData |
+ ct-AuthenticatedData | ct-DigestedData, ... }
+
+ SignedData ::= SEQUENCE {
+ version CMSVersion,
+ digestAlgorithms SET OF DigestAlgorithmIdentifier,
+ encapContentInfo EncapsulatedContentInfo,
+ certificates [0] IMPLICIT CertificateSet OPTIONAL,
+ crls [1] IMPLICIT RevocationInfoChoices OPTIONAL,
+ signerInfos SignerInfos }
+
+ SignerInfos ::= SET OF SignerInfo
+
+ EncapsulatedContentInfo ::= SEQUENCE {
+ eContentType CONTENT-TYPE.&id({ContentSet}),
+ eContent [0] EXPLICIT OCTET STRING
+ ( CONTAINING CONTENT-TYPE.
+ &Type({ContentSet}{@eContentType})) OPTIONAL }
+
+ SignerInfo ::= SEQUENCE {
+ version CMSVersion,
+ sid SignerIdentifier,
+ digestAlgorithm DigestAlgorithmIdentifier,
+ signedAttrs [0] IMPLICIT SignedAttributes OPTIONAL,
+ signatureAlgorithm SignatureAlgorithmIdentifier,
+ signature SignatureValue,
+ unsignedAttrs [1] IMPLICIT Attributes
+ {{UnsignedAttributes}} OPTIONAL }
+
+ SignedAttributes ::= Attributes {{ SignedAttributesSet }}
+
+ SignerIdentifier ::= CHOICE {
+ issuerAndSerialNumber IssuerAndSerialNumber,
+ ...,
+ [[3: subjectKeyIdentifier [0] SubjectKeyIdentifier ]] }
+
+ SignedAttributesSet ATTRIBUTE ::=
+ { aa-signingTime | aa-messageDigest | aa-contentType, ... }
+
+ UnsignedAttributes ATTRIBUTE ::= { aa-countersignature, ... }
+
+ SignatureValue ::= OCTET STRING
+
+ EnvelopedData ::= SEQUENCE {
+ version CMSVersion,
+ originatorInfo [0] IMPLICIT OriginatorInfo OPTIONAL,
+ recipientInfos RecipientInfos,
+ encryptedContentInfo EncryptedContentInfo,
+ ...,
+ [[2: unprotectedAttrs [1] IMPLICIT Attributes
+ {{ UnprotectedAttributes }} OPTIONAL ]] }
+
+ OriginatorInfo ::= SEQUENCE {
+ certs [0] IMPLICIT CertificateSet OPTIONAL,
+ crls [1] IMPLICIT RevocationInfoChoices OPTIONAL }
+
+ RecipientInfos ::= SET SIZE (1..MAX) OF RecipientInfo
+
+ EncryptedContentInfo ::= SEQUENCE {
+ contentType CONTENT-TYPE.&id({ContentSet}),
+ contentEncryptionAlgorithm ContentEncryptionAlgorithmIdentifier,
+ encryptedContent [0] IMPLICIT OCTET STRING OPTIONAL }
+
+ -- If you want to do constraints, you might use:
+ -- EncryptedContentInfo ::= SEQUENCE {
+ -- contentType CONTENT-TYPE.&id({ContentSet}),
+ -- contentEncryptionAlgorithm ContentEncryptionAlgorithmIdentifier,
+ -- encryptedContent [0] IMPLICIT ENCRYPTED {CONTENT-TYPE.
+ -- &Type({ContentSet}{@contentType}) OPTIONAL }
+ -- ENCRYPTED {ToBeEncrypted} ::= OCTET STRING ( CONSTRAINED BY
+ -- { ToBeEncrypted } )
+
+ UnprotectedAttributes ATTRIBUTE ::= { ... }
+
+ RecipientInfo ::= CHOICE {
+ ktri KeyTransRecipientInfo,
+ ...,
+ [[3: kari [1] KeyAgreeRecipientInfo ]],
+ [[4: kekri [2] KEKRecipientInfo]],
+ [[5: pwri [3] PasswordRecipientInfo,
+ ori [4] OtherRecipientInfo ]] }
+
+ EncryptedKey ::= OCTET STRING
+
+ KeyTransRecipientInfo ::= SEQUENCE {
+ version CMSVersion, -- always set to 0 or 2
+ rid RecipientIdentifier,
+ keyEncryptionAlgorithm AlgorithmIdentifier
+ {KEY-TRANSPORT, {KeyTransportAlgorithmSet}},
+ encryptedKey EncryptedKey }
+
+ KeyTransportAlgorithmSet KEY-TRANSPORT ::= { KeyTransportAlgs, ... }
+
+ RecipientIdentifier ::= CHOICE {
+ issuerAndSerialNumber IssuerAndSerialNumber,
+ ...,
+ [[2: subjectKeyIdentifier [0] SubjectKeyIdentifier ]] }
+ KeyAgreeRecipientInfo ::= SEQUENCE {
+ version CMSVersion, -- always set to 3
+ originator [0] EXPLICIT OriginatorIdentifierOrKey,
+ ukm [1] EXPLICIT UserKeyingMaterial OPTIONAL,
+ keyEncryptionAlgorithm AlgorithmIdentifier
+ {KEY-AGREE, {KeyAgreementAlgorithmSet}},
+ recipientEncryptedKeys RecipientEncryptedKeys }
+
+ KeyAgreementAlgorithmSet KEY-AGREE ::= { KeyAgreementAlgs, ... }
+
+ OriginatorIdentifierOrKey ::= CHOICE {
+ issuerAndSerialNumber IssuerAndSerialNumber,
+ subjectKeyIdentifier [0] SubjectKeyIdentifier,
+ originatorKey [1] OriginatorPublicKey }
+
+ OriginatorPublicKey ::= SEQUENCE {
+ algorithm AlgorithmIdentifier {PUBLIC-KEY, {OriginatorKeySet}},
+ publicKey BIT STRING }
+
+ OriginatorKeySet PUBLIC-KEY ::= { KeyAgreePublicKeys, ... }
+
+ RecipientEncryptedKeys ::= SEQUENCE OF RecipientEncryptedKey
+
+ RecipientEncryptedKey ::= SEQUENCE {
+ rid KeyAgreeRecipientIdentifier,
+ encryptedKey EncryptedKey }
+
+ KeyAgreeRecipientIdentifier ::= CHOICE {
+ issuerAndSerialNumber IssuerAndSerialNumber,
+ rKeyId [0] IMPLICIT RecipientKeyIdentifier }
+
+ RecipientKeyIdentifier ::= SEQUENCE {
+ subjectKeyIdentifier SubjectKeyIdentifier,
+ date GeneralizedTime OPTIONAL,
+ other OtherKeyAttribute OPTIONAL }
+
+ SubjectKeyIdentifier ::= OCTET STRING
+
+ KEKRecipientInfo ::= SEQUENCE {
+ version CMSVersion, -- always set to 4
+ kekid KEKIdentifier,
+ keyEncryptionAlgorithm KeyEncryptionAlgorithmIdentifier,
+ encryptedKey EncryptedKey }
+
+ KEKIdentifier ::= SEQUENCE {
+ keyIdentifier OCTET STRING,
+ date GeneralizedTime OPTIONAL,
+ other OtherKeyAttribute OPTIONAL }
+ PasswordRecipientInfo ::= SEQUENCE {
+ version CMSVersion, -- always set to 0
+ keyDerivationAlgorithm [0] KeyDerivationAlgorithmIdentifier
+ OPTIONAL,
+ keyEncryptionAlgorithm KeyEncryptionAlgorithmIdentifier,
+ encryptedKey EncryptedKey }
+
+ OTHER-RECIPIENT ::= TYPE-IDENTIFIER
+
+ OtherRecipientInfo ::= SEQUENCE {
+ oriType OTHER-RECIPIENT.
+ &id({SupportedOtherRecipInfo}),
+ oriValue OTHER-RECIPIENT.
+ &Type({SupportedOtherRecipInfo}{@oriType})}
+
+ SupportedOtherRecipInfo OTHER-RECIPIENT ::= { ... }
+
+ DigestedData ::= SEQUENCE {
+ version CMSVersion,
+ digestAlgorithm DigestAlgorithmIdentifier,
+ encapContentInfo EncapsulatedContentInfo,
+ digest Digest, ... }
+
+ Digest ::= OCTET STRING
+
+ EncryptedData ::= SEQUENCE {
+ version CMSVersion,
+ encryptedContentInfo EncryptedContentInfo,
+ ...,
+ [[2: unprotectedAttrs [1] IMPLICIT Attributes
+ {{UnprotectedAttributes}} OPTIONAL ]] }
+
+ AuthenticatedData ::= SEQUENCE {
+ version CMSVersion,
+ originatorInfo [0] IMPLICIT OriginatorInfo OPTIONAL,
+ recipientInfos RecipientInfos,
+ macAlgorithm MessageAuthenticationCodeAlgorithm,
+ digestAlgorithm [1] DigestAlgorithmIdentifier OPTIONAL,
+ encapContentInfo EncapsulatedContentInfo,
+ authAttrs [2] IMPLICIT AuthAttributes OPTIONAL,
+ mac MessageAuthenticationCode,
+ unauthAttrs [3] IMPLICIT UnauthAttributes OPTIONAL }
+
+ AuthAttributes ::= SET SIZE (1..MAX) OF Attribute
+ {{AuthAttributeSet}}
+
+ AuthAttributeSet ATTRIBUTE ::= { aa-contentType | aa-messageDigest
+ | aa-signingTime, ...}
+ MessageAuthenticationCode ::= OCTET STRING
+
+ UnauthAttributes ::= SET SIZE (1..MAX) OF Attribute
+ {{UnauthAttributeSet}}
+
+ UnauthAttributeSet ATTRIBUTE ::= {...}
+
+ --
+ -- General algorithm definitions
+ --
+
+ DigestAlgorithmIdentifier ::= AlgorithmIdentifier
+ {DIGEST-ALGORITHM, {DigestAlgorithmSet}}
+
+ DigestAlgorithmSet DIGEST-ALGORITHM ::= {
+ CryptographicMessageSyntaxAlgorithms-2009.MessageDigestAlgs, ... }
+
+ SignatureAlgorithmIdentifier ::= AlgorithmIdentifier
+ {SIGNATURE-ALGORITHM, {SignatureAlgorithmSet}}
+
+ SignatureAlgorithmSet SIGNATURE-ALGORITHM ::=
+ { SignatureAlgs, ... }
+
+ KeyEncryptionAlgorithmIdentifier ::= AlgorithmIdentifier
+ {KEY-WRAP, {KeyEncryptionAlgorithmSet}}
+
+ KeyEncryptionAlgorithmSet KEY-WRAP ::= { KeyWrapAlgs, ... }
+
+ ContentEncryptionAlgorithmIdentifier ::= AlgorithmIdentifier
+ {CONTENT-ENCRYPTION, {ContentEncryptionAlgorithmSet}}
+
+ ContentEncryptionAlgorithmSet CONTENT-ENCRYPTION ::=
+ { ContentEncryptionAlgs, ... }
+
+ MessageAuthenticationCodeAlgorithm ::= AlgorithmIdentifier
+ {MAC-ALGORITHM, {MessageAuthenticationCodeAlgorithmSet}}
+
+ MessageAuthenticationCodeAlgorithmSet MAC-ALGORITHM ::=
+ { MessageAuthAlgs, ... }
+
+ KeyDerivationAlgorithmIdentifier ::= AlgorithmIdentifier
+ {KEY-DERIVATION, {KeyDerivationAlgs, ...}}
+
+ RevocationInfoChoices ::= SET OF RevocationInfoChoice
+
+ RevocationInfoChoice ::= CHOICE {
+ crl CertificateList,
+ ...,
+ [[5: other [1] IMPLICIT OtherRevocationInfoFormat ]] }
+
+ OTHER-REVOK-INFO ::= TYPE-IDENTIFIER
+
+ OtherRevocationInfoFormat ::= SEQUENCE {
+ otherRevInfoFormat OTHER-REVOK-INFO.
+ &id({SupportedOtherRevokInfo}),
+ otherRevInfo OTHER-REVOK-INFO.
+ &Type({SupportedOtherRevokInfo}{@otherRevInfoFormat})}
+
+ SupportedOtherRevokInfo OTHER-REVOK-INFO ::= { ... }
+
+ CertificateChoices ::= CHOICE {
+ certificate Certificate,
+ extendedCertificate [0] IMPLICIT ExtendedCertificate,
+ -- Obsolete
+ ...,
+ [[3: v1AttrCert [1] IMPLICIT AttributeCertificateV1]],
+ -- Obsolete
+ [[4: v2AttrCert [2] IMPLICIT AttributeCertificateV2]],
+ [[5: other [3] IMPLICIT OtherCertificateFormat]] }
+
+ AttributeCertificateV2 ::= AttributeCertificate
+
+ OTHER-CERT-FMT ::= TYPE-IDENTIFIER
+
+ OtherCertificateFormat ::= SEQUENCE {
+ otherCertFormat OTHER-CERT-FMT.
+ &id({SupportedCertFormats}),
+ otherCert OTHER-CERT-FMT.
+ &Type({SupportedCertFormats}{@otherCertFormat})}
+
+ SupportedCertFormats OTHER-CERT-FMT ::= { ... }
+
+ CertificateSet ::= SET OF CertificateChoices
+
+ IssuerAndSerialNumber ::= SEQUENCE {
+ issuer Name,
+ serialNumber CertificateSerialNumber }
+
+ CMSVersion ::= INTEGER { v0(0), v1(1), v2(2), v3(3), v4(4), v5(5) }
+
+ UserKeyingMaterial ::= OCTET STRING
+
+ KEY-ATTRIBUTE ::= TYPE-IDENTIFIER
+
+ OtherKeyAttribute ::= SEQUENCE {
+ keyAttrId KEY-ATTRIBUTE.
+ &id({SupportedKeyAttributes}),
+ keyAttr KEY-ATTRIBUTE.
+ &Type({SupportedKeyAttributes}{@keyAttrId})}
+
+
+ SupportedKeyAttributes KEY-ATTRIBUTE ::= { ... }
+
+ -- Content Type Object Identifiers
+
+ id-ct-contentInfo OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs9(9) smime(16) ct(1) 6 }
+
+ ct-Data CONTENT-TYPE ::= {OCTET STRING IDENTIFIED BY id-data}
+
+ id-data OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs7(7) 1 }
+
+ ct-SignedData CONTENT-TYPE ::=
+ { SignedData IDENTIFIED BY id-signedData}
+
+ id-signedData OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs7(7) 2 }
+
+ ct-EnvelopedData CONTENT-TYPE ::=
+ { EnvelopedData IDENTIFIED BY id-envelopedData}
+
+ id-envelopedData OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs7(7) 3 }
+
+ ct-DigestedData CONTENT-TYPE ::=
+ { DigestedData IDENTIFIED BY id-digestedData}
+
+ id-digestedData OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs7(7) 5 }
+
+ ct-EncryptedData CONTENT-TYPE ::=
+ { EncryptedData IDENTIFIED BY id-encryptedData}
+
+ id-encryptedData OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs7(7) 6 }
+
+ ct-AuthenticatedData CONTENT-TYPE ::=
+ { AuthenticatedData IDENTIFIED BY id-ct-authData}
+
+ id-ct-authData OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs-9(9) smime(16) ct(1) 2 }
+
+ --
+ -- The CMS Attributes
+ --
+
+ MessageDigest ::= OCTET STRING
+
+ SigningTime ::= Time
+
+ Time ::= CHOICE {
+ utcTime UTCTime,
+ generalTime GeneralizedTime }
+
+ Countersignature ::= SignerInfo
+
+ -- Attribute Object Identifiers
+
+ aa-contentType ATTRIBUTE ::=
+ { TYPE ContentType IDENTIFIED BY id-contentType }
+ id-contentType OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs9(9) 3 }
+
+ aa-messageDigest ATTRIBUTE ::=
+ { TYPE MessageDigest IDENTIFIED BY id-messageDigest}
+ id-messageDigest OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs9(9) 4 }
+
+ aa-signingTime ATTRIBUTE ::=
+ { TYPE SigningTime IDENTIFIED BY id-signingTime }
+ id-signingTime OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs9(9) 5 }
+
+ aa-countersignature ATTRIBUTE ::=
+ { TYPE Countersignature IDENTIFIED BY id-countersignature }
+ id-countersignature OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs9(9) 6 }
+
+ --
+ -- Obsolete Extended Certificate syntax from PKCS#6
+ --
+
+ ExtendedCertificateOrCertificate ::= CHOICE {
+ certificate Certificate,
+ extendedCertificate [0] IMPLICIT ExtendedCertificate }
+
+ ExtendedCertificate ::= SEQUENCE {
+ extendedCertificateInfo ExtendedCertificateInfo,
+ signatureAlgorithm SignatureAlgorithmIdentifier,
+ signature Signature }
+
+ ExtendedCertificateInfo ::= SEQUENCE {
+ version CMSVersion,
+ certificate Certificate,
+ attributes UnauthAttributes }
+
+ Signature ::= BIT STRING
+
+ Attribute{ ATTRIBUTE:AttrList } ::= SEQUENCE {
+ attrType ATTRIBUTE.
+ &id({AttrList}),
+ attrValues SET OF ATTRIBUTE.
+ &Type({AttrList}{@attrType}) }
+
+ Attributes { ATTRIBUTE:AttrList } ::=
+ SET SIZE (1..MAX) OF Attribute {{ AttrList }}
+
+ END
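As a worked illustration of the table constraints in ContentInfo above: the contentType OID selects, via ContentSet, the ASN.1 type that the [0] EXPLICIT content component must carry. A minimal sketch, not part of the fixture (the value name exampleContentInfo is arbitrary and the octets are dummy data):

    -- ct-Data associates id-data with OCTET STRING, so a ContentInfo
    -- whose contentType is id-data must carry an OCTET STRING.
    exampleContentInfo ContentInfo ::= {
        contentType id-data,
        content     OCTET STRING : '48656C6C6F'H
    }

The same open-type value notation (Type : value) is used by defaultPBKDF2 in the algorithms module that follows.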
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/CryptographicMessageSyntaxAlgorithms-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/CryptographicMessageSyntaxAlgorithms-2009.asn1
new file mode 100644
index 0000000000..72e8b270db
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/CryptographicMessageSyntaxAlgorithms-2009.asn1
@@ -0,0 +1,248 @@
+ CryptographicMessageSyntaxAlgorithms-2009
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-cmsalg-2001-02(37) }
+ DEFINITIONS IMPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ ParamOptions, DIGEST-ALGORITHM, SIGNATURE-ALGORITHM,
+ PUBLIC-KEY, KEY-DERIVATION, KEY-WRAP, MAC-ALGORITHM,
+ KEY-AGREE, KEY-TRANSPORT, CONTENT-ENCRYPTION, ALGORITHM,
+ AlgorithmIdentifier{}, SMIME-CAPS
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ pk-rsa, pk-dh, pk-dsa, rsaEncryption, DHPublicKey, dhpublicnumber
+ FROM PKIXAlgs-2009
+ {iso(1) identified-organization(3) dod(6)
+ internet(1) security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-algorithms2008-02(56)}
+
+ cap-RC2CBC
+ FROM SecureMimeMessageV3dot1-2009
+ {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-msg-v3dot1-02(39)};
+
+ -- 2. Hash algorithms in this document
+
+ MessageDigestAlgs DIGEST-ALGORITHM ::= {
+ -- mda-md5 | mda-sha1,
+ ... }
+
+ -- 3. Signature algorithms in this document
+
+ SignatureAlgs SIGNATURE-ALGORITHM ::= {
+ -- See RFC 3279
+ -- sa-dsaWithSHA1 | sa-rsaWithMD5 | sa-rsaWithSHA1,
+ ... }
+
+ -- 4. Key Management Algorithms
+ -- 4.1 Key Agreement Algorithms
+
+ KeyAgreementAlgs KEY-AGREE ::= { kaa-esdh | kaa-ssdh, ...}
+ KeyAgreePublicKeys PUBLIC-KEY ::= { pk-dh, ...}
+
+ -- 4.2 Key Transport Algorithms
+
+ KeyTransportAlgs KEY-TRANSPORT ::= { kt-rsa, ... }
+
+ -- 4.3 Symmetric Key-Encryption Key Algorithms
+
+ KeyWrapAlgs KEY-WRAP ::= { kwa-3DESWrap | kwa-RC2Wrap, ... }
+
+ -- 4.4 Key Derivation Algorithms
+
+ KeyDerivationAlgs KEY-DERIVATION ::= { kda-PBKDF2, ... }
+
+ -- 5. Content Encryption Algorithms
+
+ ContentEncryptionAlgs CONTENT-ENCRYPTION ::=
+ { cea-3DES-cbc | cea-RC2-cbc, ... }
+
+ -- 6. Message Authentication Code Algorithms
+
+ MessageAuthAlgs MAC-ALGORITHM ::= { maca-hMAC-SHA1, ... }
+
+ -- S/MIME Capabilities for these items
+
+ SMimeCaps SMIME-CAPS ::= {
+ kaa-esdh.&smimeCaps |
+ kaa-ssdh.&smimeCaps |
+ kt-rsa.&smimeCaps |
+ kwa-3DESWrap.&smimeCaps |
+ kwa-RC2Wrap.&smimeCaps |
+ cea-3DES-cbc.&smimeCaps |
+ cea-RC2-cbc.&smimeCaps |
+ maca-hMAC-SHA1.&smimeCaps,
+ ...}
+
+ --
+ --
+ --
+
+ -- Algorithm Identifiers
+
+ -- rsaEncryption OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ -- us(840) rsadsi(113549) pkcs(1) pkcs-1(1) 1 }
+
+ id-alg-ESDH OBJECT IDENTIFIER ::= { iso(1) member-body(2) us(840)
+ rsadsi(113549) pkcs(1) pkcs-9(9) smime(16) alg(3) 5 }
+
+ id-alg-SSDH OBJECT IDENTIFIER ::= { iso(1) member-body(2) us(840)
+ rsadsi(113549) pkcs(1) pkcs-9(9) smime(16) alg(3) 10 }
+
+ id-alg-CMS3DESwrap OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs-9(9) smime(16) alg(3) 6 }
+
+ id-alg-CMSRC2wrap OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs-9(9) smime(16) alg(3) 7 }
+
+ des-ede3-cbc OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) encryptionAlgorithm(3) 7 }
+
+ rc2-cbc OBJECT IDENTIFIER ::= { iso(1) member-body(2) us(840)
+ rsadsi(113549) encryptionAlgorithm(3) 2 }
+
+ hMAC-SHA1 OBJECT IDENTIFIER ::= { iso(1) identified-organization(3)
+ dod(6) internet(1) security(5) mechanisms(5) 8 1 2 }
+
+ id-PBKDF2 OBJECT IDENTIFIER ::= { iso(1) member-body(2) us(840)
+ rsadsi(113549) pkcs(1) pkcs-5(5) 12 }
+
+ -- Algorithm Identifier Parameter Types
+
+ KeyWrapAlgorithm ::=
+ AlgorithmIdentifier {KEY-WRAP, {KeyWrapAlgs }}
+
+ RC2wrapParameter ::= RC2ParameterVersion
+ RC2ParameterVersion ::= INTEGER
+
+ CBCParameter ::= IV
+
+ IV ::= OCTET STRING -- exactly 8 octets
+
+ RC2CBCParameter ::= SEQUENCE {
+ rc2ParameterVersion INTEGER (1..256),
+ iv OCTET STRING } -- exactly 8 octets
+
+ maca-hMAC-SHA1 MAC-ALGORITHM ::= {
+ IDENTIFIER hMAC-SHA1
+ PARAMS TYPE NULL ARE preferredAbsent
+ IS-KEYED-MAC TRUE
+ SMIME-CAPS {IDENTIFIED BY hMAC-SHA1}
+ }
+
+ PBKDF2-PRFsAlgorithmIdentifier ::= AlgorithmIdentifier{ ALGORITHM,
+ {PBKDF2-PRFs} }
+
+ alg-hMAC-SHA1 ALGORITHM ::=
+ { IDENTIFIER hMAC-SHA1 PARAMS TYPE NULL ARE required }
+
+ PBKDF2-PRFs ALGORITHM ::= { alg-hMAC-SHA1, ... }
+
+ PBKDF2-SaltSources ALGORITHM ::= { ... }
+
+ PBKDF2-SaltSourcesAlgorithmIdentifier ::=
+ AlgorithmIdentifier {ALGORITHM, {PBKDF2-SaltSources}}
+
+ defaultPBKDF2 PBKDF2-PRFsAlgorithmIdentifier ::=
+ { algorithm alg-hMAC-SHA1.&id, parameters NULL:NULL }
+
+ PBKDF2-params ::= SEQUENCE {
+ salt CHOICE {
+ specified OCTET STRING,
+ otherSource PBKDF2-SaltSourcesAlgorithmIdentifier },
+ iterationCount INTEGER (1..MAX),
+ keyLength INTEGER (1..MAX) OPTIONAL,
+ prf PBKDF2-PRFsAlgorithmIdentifier DEFAULT
+ defaultPBKDF2
+ }
+
+ --
+ -- This object is included for completeness. It should not be used
+ -- for encoding of signatures, but was sometimes used in older
+ -- versions of CMS for encoding of RSA signatures.
+ --
+ --
+ -- sa-rsa SIGNATURE-ALGORITHM ::= {
+ -- IDENTIFIER rsaEncryption
+ -- - - value is not ASN.1 encoded
+ -- PARAMS TYPE NULL ARE required
+ -- HASHES {mda-sha1 | mda-md5, ...}
+ -- PUBLIC-KEYS { pk-rsa}
+ -- }
+ --
+ -- No ASN.1 encoding is applied to the signature value
+ -- for these items
+
+ kaa-esdh KEY-AGREE ::= {
+ IDENTIFIER id-alg-ESDH
+ PARAMS TYPE KeyWrapAlgorithm ARE required
+ PUBLIC-KEYS { pk-dh }
+ -- UKM is not ASN.1 encoded
+ UKM ARE optional
+ SMIME-CAPS {TYPE KeyWrapAlgorithm IDENTIFIED BY id-alg-ESDH}
+ }
+
+ kaa-ssdh KEY-AGREE ::= {
+ IDENTIFIER id-alg-SSDH
+ PARAMS TYPE KeyWrapAlgorithm ARE required
+ PUBLIC-KEYS {pk-dh}
+ -- UKM is not ASN.1 encoded
+ UKM ARE optional
+ SMIME-CAPS {TYPE KeyWrapAlgorithm IDENTIFIED BY id-alg-SSDH}
+ }
+
+ dh-public-number OBJECT IDENTIFIER ::= dhpublicnumber
+
+ pk-originator-dh PUBLIC-KEY ::= {
+ IDENTIFIER dh-public-number
+ KEY DHPublicKey
+ PARAMS ARE absent
+ CERT-KEY-USAGE {keyAgreement, encipherOnly, decipherOnly}
+ }
+
+ kwa-3DESWrap KEY-WRAP ::= {
+ IDENTIFIER id-alg-CMS3DESwrap
+ PARAMS TYPE NULL ARE required
+ SMIME-CAPS {IDENTIFIED BY id-alg-CMS3DESwrap}
+ }
+
+ kwa-RC2Wrap KEY-WRAP ::= {
+ IDENTIFIER id-alg-CMSRC2wrap
+ PARAMS TYPE RC2wrapParameter ARE required
+ SMIME-CAPS { IDENTIFIED BY id-alg-CMSRC2wrap }
+ }
+
+ kda-PBKDF2 KEY-DERIVATION ::= {
+ IDENTIFIER id-PBKDF2
+ PARAMS TYPE PBKDF2-params ARE required
+ -- No S/MIME caps defined
+ }
+
+ cea-3DES-cbc CONTENT-ENCRYPTION ::= {
+ IDENTIFIER des-ede3-cbc
+ PARAMS TYPE IV ARE required
+ SMIME-CAPS { IDENTIFIED BY des-ede3-cbc }
+ }
+
+ cea-RC2-cbc CONTENT-ENCRYPTION ::= {
+ IDENTIFIER rc2-cbc
+ PARAMS TYPE RC2CBCParameter ARE required
+ SMIME-CAPS cap-RC2CBC
+ }
+
+ kt-rsa KEY-TRANSPORT ::= {
+ IDENTIFIER rsaEncryption
+ PARAMS TYPE NULL ARE required
+ PUBLIC-KEYS { pk-rsa }
+ SMIME-CAPS {IDENTIFIED BY rsaEncryption}
+ }
+
+ -- S/MIME Capabilities - most have no label.
+
+ cap-3DESwrap SMIME-CAPS ::= { IDENTIFIED BY id-alg-CMS3DESwrap }
+
+ END
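The algorithm objects above are meant to be plugged into the parameterized AlgorithmIdentifier{} from AlgorithmInformation-2009. A minimal sketch of that wiring, not part of the fixture (ExampleCEAlgorithmIdentifier and example3DES are arbitrary names and the IV octets are dummy data), following the same pattern as PBKDF2-PRFsAlgorithmIdentifier and defaultPBKDF2 above:

    -- Instantiate the identifier type over the content-encryption set...
    ExampleCEAlgorithmIdentifier ::=
        AlgorithmIdentifier{CONTENT-ENCRYPTION, {ContentEncryptionAlgs}}

    -- ...then a conforming value: cea-3DES-cbc declares PARAMS TYPE IV,
    -- so the parameters open type must carry an IV (an 8-octet string).
    example3DES ExampleCEAlgorithmIdentifier ::= {
        algorithm  des-ede3-cbc,
        parameters IV : '0001020304050607'H
    }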
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DOR-definition.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DOR-definition.asn1
index cd3330dc56..cd3330dc56 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/DOR-definition.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DOR-definition.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DSAOperationalAttributeTypes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DSAOperationalAttributeTypes.asn1
index df5e8489ea..df5e8489ea 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/DSAOperationalAttributeTypes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DSAOperationalAttributeTypes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Default-Value-Lists.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Default-Value-Lists.asn1
index ef1187ba8c..ef1187ba8c 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Default-Value-Lists.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Default-Value-Lists.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryAbstractService.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryAbstractService.asn1
index 5a5d310729..5a5d310729 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryAbstractService.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryAbstractService.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryAccessProtocol.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryAccessProtocol.asn1
index 10d6979f6d..10d6979f6d 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryAccessProtocol.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryAccessProtocol.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryInformationShadowProtocol.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryInformationShadowProtocol.asn1
index 91c0a865f7..91c0a865f7 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryInformationShadowProtocol.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryInformationShadowProtocol.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryOperationalBindingManagementProtocol.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryOperationalBindingManagementProtocol.asn1
index e3e1f95621..e3e1f95621 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryOperationalBindingManagementProtocol.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryOperationalBindingManagementProtocol.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryOperationalBindingTypes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryOperationalBindingTypes.asn1
index 9df5d2783a..9df5d2783a 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryOperationalBindingTypes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryOperationalBindingTypes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryProtectionMappings.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryProtectionMappings.asn1
index 37c6cac261..37c6cac261 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryProtectionMappings.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryProtectionMappings.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryShadowAbstractService.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryShadowAbstractService.asn1
index acbb692b6f..acbb692b6f 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryShadowAbstractService.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryShadowAbstractService.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DirectorySystemProtocol.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectorySystemProtocol.asn1
index cace79d109..cace79d109 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/DirectorySystemProtocol.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectorySystemProtocol.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DistributedOperations.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DistributedOperations.asn1
index 72e791f10c..72e791f10c 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/DistributedOperations.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DistributedOperations.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Document-Profile-Descriptor.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Document-Profile-Descriptor.asn1
index d8c15b7afa..d8c15b7afa 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Document-Profile-Descriptor.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Document-Profile-Descriptor.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/EnhancedSecurity.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/EnhancedSecurity.asn1
index 9991a59454..9991a59454 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/EnhancedSecurity.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/EnhancedSecurity.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/EnrollmentMessageSyntax-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/EnrollmentMessageSyntax-2009.asn1
new file mode 100644
index 0000000000..17a45a0a6b
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/EnrollmentMessageSyntax-2009.asn1
@@ -0,0 +1,543 @@
+ EnrollmentMessageSyntax-2009
+ {iso(1) identified-organization(3) dod(6) internet(1)
+ security(5) mechanisms(5) pkix(7) id-mod(0) id-mod-cmc2002-02(53)}
+ DEFINITIONS IMPLICIT TAGS ::=
+ BEGIN
+ EXPORTS ALL;
+ IMPORTS
+
+ AttributeSet{}, Extension{}, EXTENSION, ATTRIBUTE
+ FROM PKIX-CommonTypes-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57)}
+ AlgorithmIdentifier{}, DIGEST-ALGORITHM, KEY-WRAP, KEY-DERIVATION,
+ MAC-ALGORITHM, SIGNATURE-ALGORITHM, PUBLIC-KEY
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ CertificateSerialNumber, GeneralName, CRLReason, ReasonFlags,
+ CertExtensions
+ FROM PKIX1Implicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-implicit-02(59)}
+
+ Name, id-pkix, PublicKeyAlgorithms, SignatureAlgorithms
+ FROM PKIX1Explicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-explicit-02(51)}
+
+ ContentInfo, IssuerAndSerialNumber, CONTENT-TYPE
+ FROM CryptographicMessageSyntax-2009
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-cms-2004-02(41)}
+
+ CertReqMsg, PKIPublicationInfo, CertTemplate
+ FROM PKIXCRMF-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-crmf2005-02(55)}
+
+ mda-sha1
+ FROM PKIXAlgs-2009
+ { iso(1) identified-organization(3) dod(6)
+ internet(1) security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-algorithms2008-02(56)}
+
+ kda-PBKDF2, maca-hMAC-SHA1
+ FROM CryptographicMessageSyntaxAlgorithms-2009
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-cmsalg-2001-02(37) }
+
+ mda-sha256
+ FROM PKIX1-PSS-OAEP-Algorithms-2009
+ { iso(1) identified-organization(3) dod(6)
+ internet(1) security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-rsa-pkalgs-02(54) } ;
+
+ -- CMS Content types defined in this document
+ CMC-ContentTypes CONTENT-TYPE ::= { ct-PKIData | ct-PKIResponse, ... }
+
+ -- Signature Algorithms defined in this document
+
+ SignatureAlgs SIGNATURE-ALGORITHM ::= { sa-noSignature }
+
+ -- CMS Unsigned Attributes
+
+ CMC-UnsignedAtts ATTRIBUTE ::= { aa-cmc-unsignedData }
+
+ --
+ --
+
+ id-cmc OBJECT IDENTIFIER ::= {id-pkix 7} -- CMC controls
+ id-cct OBJECT IDENTIFIER ::= {id-pkix 12} -- CMC content types
+
+ -- This is the content type for a request message in the protocol
+
+ ct-PKIData CONTENT-TYPE ::=
+ { PKIData IDENTIFIED BY id-cct-PKIData }
+ id-cct-PKIData OBJECT IDENTIFIER ::= { id-cct 2 }
+
+ PKIData ::= SEQUENCE {
+ controlSequence SEQUENCE SIZE(0..MAX) OF TaggedAttribute,
+ reqSequence SEQUENCE SIZE(0..MAX) OF TaggedRequest,
+ cmsSequence SEQUENCE SIZE(0..MAX) OF TaggedContentInfo,
+ otherMsgSequence SEQUENCE SIZE(0..MAX) OF OtherMsg
+ }
+
+ BodyPartID ::= INTEGER(0..4294967295)
+
+ TaggedAttribute ::= SEQUENCE {
+ bodyPartID BodyPartID,
+ attrType CMC-CONTROL.&id({Cmc-Control-Set}),
+ attrValues SET OF CMC-CONTROL.
+ &Type({Cmc-Control-Set}{@attrType})
+ }
+
+ Cmc-Control-Set CMC-CONTROL ::= {
+ cmc-identityProof | cmc-dataReturn | cmc-regInfo |
+ cmc-responseInfo | cmc-queryPending | cmc-popLinkRandom |
+ cmc-popLinkWitness | cmc-identification | cmc-transactionId |
+ cmc-senderNonce | cmc-recipientNonce | cmc-statusInfo |
+ cmc-addExtensions | cmc-encryptedPOP | cmc-decryptedPOP |
+ cmc-lraPOPWitness | cmc-getCert | cmc-getCRL |
+ cmc-revokeRequest | cmc-confirmCertAcceptance |
+ cmc-statusInfoV2 | cmc-trustedAnchors | cmc-authData |
+ cmc-batchRequests | cmc-batchResponses | cmc-publishCert |
+ cmc-modCertTemplate | cmc-controlProcessed |
+ cmc-identityProofV2 | cmc-popLinkWitnessV2, ... }
+
+ OTHER-REQUEST ::= TYPE-IDENTIFIER
+
+ -- We do not define any other requests in this document;
+ -- examples might be attribute certification requests
+
+ OtherRequests OTHER-REQUEST ::= {...}
+
+ TaggedRequest ::= CHOICE {
+ tcr [0] TaggedCertificationRequest,
+ crm [1] CertReqMsg,
+ orm [2] SEQUENCE {
+ bodyPartID BodyPartID,
+ requestMessageType OTHER-REQUEST.&id({OtherRequests}),
+ requestMessageValue OTHER-REQUEST.&Type({OtherRequests}
+ {@.requestMessageType})
+ }
+ }
+
+ TaggedCertificationRequest ::= SEQUENCE {
+ bodyPartID BodyPartID,
+ certificationRequest CertificationRequest
+ }
+
+ AttributeList ATTRIBUTE ::= {at-extension-req, ...}
+
+ CertificationRequest ::= SEQUENCE {
+ certificationRequestInfo SEQUENCE {
+ version INTEGER,
+ subject Name,
+ subjectPublicKeyInfo SEQUENCE {
+ algorithm AlgorithmIdentifier{PUBLIC-KEY,
+ {PublicKeyAlgorithms}},
+ subjectPublicKey BIT STRING
+ },
+ attributes [0] IMPLICIT SET OF
+ AttributeSet{{AttributeList}}
+ },
+ signatureAlgorithm AlgorithmIdentifier
+ {SIGNATURE-ALGORITHM,
+ {SignatureAlgorithms}},
+ signature BIT STRING
+ }
+
+ TaggedContentInfo ::= SEQUENCE {
+ bodyPartID BodyPartID,
+ contentInfo ContentInfo
+ }
+
+ OTHER-MSG ::= TYPE-IDENTIFIER
+
+ -- No other messages currently defined
+
+ OtherMsgSet OTHER-MSG ::= {...}
+
+ OtherMsg ::= SEQUENCE {
+ bodyPartID BodyPartID,
+ otherMsgType OTHER-MSG.&id({OtherMsgSet}),
+ otherMsgValue OTHER-MSG.&Type({OtherMsgSet}{@otherMsgType}) }
+
+ -- This defines the response message in the protocol
+
+ ct-PKIResponse CONTENT-TYPE ::=
+ { PKIResponse IDENTIFIED BY id-cct-PKIResponse }
+ id-cct-PKIResponse OBJECT IDENTIFIER ::= { id-cct 3 }
+
+ ResponseBody ::= PKIResponse
+
+ PKIResponse ::= SEQUENCE {
+ controlSequence SEQUENCE SIZE(0..MAX) OF TaggedAttribute,
+ cmsSequence SEQUENCE SIZE(0..MAX) OF TaggedContentInfo,
+ otherMsgSequence SEQUENCE SIZE(0..MAX) OF OtherMsg
+ }
+
+ CMC-CONTROL ::= TYPE-IDENTIFIER
+
+ -- The following controls have the type OCTET STRING
+
+ cmc-identityProof CMC-CONTROL ::=
+ { OCTET STRING IDENTIFIED BY id-cmc-identityProof }
+ id-cmc-identityProof OBJECT IDENTIFIER ::= {id-cmc 3}
+
+ cmc-dataReturn CMC-CONTROL ::=
+ { OCTET STRING IDENTIFIED BY id-cmc-dataReturn }
+ id-cmc-dataReturn OBJECT IDENTIFIER ::= {id-cmc 4}
+
+ cmc-regInfo CMC-CONTROL ::=
+ { OCTET STRING IDENTIFIED BY id-cmc-regInfo }
+ id-cmc-regInfo OBJECT IDENTIFIER ::= {id-cmc 18}
+
+ cmc-responseInfo CMC-CONTROL ::=
+ { OCTET STRING IDENTIFIED BY id-cmc-responseInfo }
+ id-cmc-responseInfo OBJECT IDENTIFIER ::= {id-cmc 19}
+
+ cmc-queryPending CMC-CONTROL ::=
+ { OCTET STRING IDENTIFIED BY id-cmc-queryPending }
+ id-cmc-queryPending OBJECT IDENTIFIER ::= {id-cmc 21}
+
+ cmc-popLinkRandom CMC-CONTROL ::=
+ { OCTET STRING IDENTIFIED BY id-cmc-popLinkRandom }
+ id-cmc-popLinkRandom OBJECT IDENTIFIER ::= {id-cmc 22}
+
+ cmc-popLinkWitness CMC-CONTROL ::=
+ { OCTET STRING IDENTIFIED BY id-cmc-popLinkWitness }
+ id-cmc-popLinkWitness OBJECT IDENTIFIER ::= {id-cmc 23}
+
+ -- The following controls have the type UTF8String
+
+ cmc-identification CMC-CONTROL ::=
+ { UTF8String IDENTIFIED BY id-cmc-identification }
+ id-cmc-identification OBJECT IDENTIFIER ::= {id-cmc 2}
+
+ -- The following controls have the type INTEGER
+
+ cmc-transactionId CMC-CONTROL ::=
+ { INTEGER IDENTIFIED BY id-cmc-transactionId }
+ id-cmc-transactionId OBJECT IDENTIFIER ::= {id-cmc 5}
+
+ -- The following controls have the type OCTET STRING
+
+ cmc-senderNonce CMC-CONTROL ::=
+ { OCTET STRING IDENTIFIED BY id-cmc-senderNonce }
+
+ id-cmc-senderNonce OBJECT IDENTIFIER ::= {id-cmc 6}
+
+ cmc-recipientNonce CMC-CONTROL ::=
+ { OCTET STRING IDENTIFIED BY id-cmc-recipientNonce }
+ id-cmc-recipientNonce OBJECT IDENTIFIER ::= {id-cmc 7}
+
+ -- Used to return status in a response
+
+ cmc-statusInfo CMC-CONTROL ::=
+ { CMCStatusInfo IDENTIFIED BY id-cmc-statusInfo }
+ id-cmc-statusInfo OBJECT IDENTIFIER ::= {id-cmc 1}
+
+ CMCStatusInfo ::= SEQUENCE {
+ cMCStatus CMCStatus,
+ bodyList SEQUENCE SIZE (1..MAX) OF BodyPartID,
+ statusString UTF8String OPTIONAL,
+ otherInfo CHOICE {
+ failInfo CMCFailInfo,
+ pendInfo PendInfo
+ } OPTIONAL
+ }
+
+ PendInfo ::= SEQUENCE {
+ pendToken OCTET STRING,
+ pendTime GeneralizedTime
+ }
+
+ CMCStatus ::= INTEGER {
+ success (0),
+ failed (2),
+ pending (3),
+ noSupport (4),
+ confirmRequired (5),
+ popRequired (6),
+ partial (7)
+ }
+
+ -- Note:
+ -- The spelling of unsupportedExt is corrected in this version.
+ -- In RFC 2797, it was unsuportedExt.
+
+ CMCFailInfo ::= INTEGER {
+ badAlg (0),
+ badMessageCheck (1),
+ badRequest (2),
+ badTime (3),
+ badCertId (4),
+ unsupportedExt (5),
+ mustArchiveKeys (6),
+ badIdentity (7),
+ popRequired (8),
+ popFailed (9),
+ noKeyReuse (10),
+ internalCAError (11),
+ tryLater (12),
+ authDataFail (13)
+ }
+
+ -- Used for RAs to add extensions to certification requests
+
+ cmc-addExtensions CMC-CONTROL ::=
+ { AddExtensions IDENTIFIED BY id-cmc-addExtensions }
+ id-cmc-addExtensions OBJECT IDENTIFIER ::= {id-cmc 8}
+
+ AddExtensions ::= SEQUENCE {
+ pkiDataReference BodyPartID,
+ certReferences SEQUENCE OF BodyPartID,
+ extensions SEQUENCE OF Extension{{CertExtensions}}
+ }
+
+ cmc-encryptedPOP CMC-CONTROL ::=
+ { EncryptedPOP IDENTIFIED BY id-cmc-encryptedPOP }
+ cmc-decryptedPOP CMC-CONTROL ::=
+ { DecryptedPOP IDENTIFIED BY id-cmc-decryptedPOP }
+ id-cmc-encryptedPOP OBJECT IDENTIFIER ::= {id-cmc 9}
+ id-cmc-decryptedPOP OBJECT IDENTIFIER ::= {id-cmc 10}
+
+ EncryptedPOP ::= SEQUENCE {
+ request TaggedRequest,
+ cms ContentInfo,
+ thePOPAlgID AlgorithmIdentifier{MAC-ALGORITHM, {POPAlgs}},
+ witnessAlgID AlgorithmIdentifier{DIGEST-ALGORITHM,
+ {WitnessAlgs}},
+ witness OCTET STRING
+ }
+
+ POPAlgs MAC-ALGORITHM ::= {maca-hMAC-SHA1, ...}
+ WitnessAlgs DIGEST-ALGORITHM ::= {mda-sha1, ...}
+
+ DecryptedPOP ::= SEQUENCE {
+ bodyPartID BodyPartID,
+ thePOPAlgID AlgorithmIdentifier{MAC-ALGORITHM, {POPAlgs}},
+ thePOP OCTET STRING
+ }
+
+ cmc-lraPOPWitness CMC-CONTROL ::=
+ { LraPopWitness IDENTIFIED BY id-cmc-lraPOPWitness }
+
+ id-cmc-lraPOPWitness OBJECT IDENTIFIER ::= {id-cmc 11}
+
+ LraPopWitness ::= SEQUENCE {
+ pkiDataBodyid BodyPartID,
+ bodyIds SEQUENCE OF BodyPartID
+ }
+
+ --
+
+ cmc-getCert CMC-CONTROL ::=
+ { GetCert IDENTIFIED BY id-cmc-getCert }
+ id-cmc-getCert OBJECT IDENTIFIER ::= {id-cmc 15}
+
+ GetCert ::= SEQUENCE {
+ issuerName GeneralName,
+ serialNumber INTEGER }
+
+ cmc-getCRL CMC-CONTROL ::=
+ { GetCRL IDENTIFIED BY id-cmc-getCRL }
+ id-cmc-getCRL OBJECT IDENTIFIER ::= {id-cmc 16}
+ GetCRL ::= SEQUENCE {
+ issuerName Name,
+ cRLName GeneralName OPTIONAL,
+ time GeneralizedTime OPTIONAL,
+ reasons ReasonFlags OPTIONAL }
+
+ cmc-revokeRequest CMC-CONTROL ::=
+ { RevokeRequest IDENTIFIED BY id-cmc-revokeRequest}
+ id-cmc-revokeRequest OBJECT IDENTIFIER ::= {id-cmc 17}
+
+ RevokeRequest ::= SEQUENCE {
+ issuerName Name,
+ serialNumber INTEGER,
+ reason CRLReason,
+ invalidityDate GeneralizedTime OPTIONAL,
+ passphrase OCTET STRING OPTIONAL,
+ comment UTF8String OPTIONAL }
+
+ cmc-confirmCertAcceptance CMC-CONTROL ::=
+ { CMCCertId IDENTIFIED BY id-cmc-confirmCertAcceptance }
+ id-cmc-confirmCertAcceptance OBJECT IDENTIFIER ::= {id-cmc 24}
+
+ CMCCertId ::= IssuerAndSerialNumber
+
+ -- The following is used to request v3 extensions be added
+ -- to a certificate
+
+ at-extension-req ATTRIBUTE ::=
+ { TYPE ExtensionReq IDENTIFIED BY id-ExtensionReq }
+ id-ExtensionReq OBJECT IDENTIFIER ::= {iso(1) member-body(2) us(840)
+ rsadsi(113549) pkcs(1) pkcs-9(9) 14}
+
+ ExtensionReq ::= SEQUENCE SIZE (1..MAX) OF
+ Extension{{CertExtensions}}
+
+ -- The following allows Diffie-Hellman Certification Request
+ -- Messages to be well-formed
+
+ sa-noSignature SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER id-alg-noSignature
+ VALUE NoSignatureValue
+ PARAMS TYPE NULL ARE required
+ HASHES { mda-sha1 }
+ }
+ id-alg-noSignature OBJECT IDENTIFIER ::= {id-pkix id-alg(6) 2}
+
+ NoSignatureValue ::= OCTET STRING
+ -- Unauthenticated attribute to carry removable data.
+
+ id-aa OBJECT IDENTIFIER ::= { iso(1) member-body(2) us(840)
+ rsadsi(113549) pkcs(1) pkcs-9(9) smime(16) id-aa(2)}
+
+ aa-cmc-unsignedData ATTRIBUTE ::=
+ { TYPE CMCUnsignedData IDENTIFIED BY id-aa-cmc-unsignedData }
+ id-aa-cmc-unsignedData OBJECT IDENTIFIER ::= {id-aa 34}
+
+ CMCUnsignedData ::= SEQUENCE {
+ bodyPartPath BodyPartPath,
+ identifier TYPE-IDENTIFIER.&id,
+ content TYPE-IDENTIFIER.&Type
+ }
+
+ -- Replaces CMC Status Info
+ --
+
+ cmc-statusInfoV2 CMC-CONTROL ::=
+ { CMCStatusInfoV2 IDENTIFIED BY id-cmc-statusInfoV2 }
+ id-cmc-statusInfoV2 OBJECT IDENTIFIER ::= {id-cmc 25}
+
+ EXTENDED-FAILURE-INFO ::= TYPE-IDENTIFIER
+
+ ExtendedFailures EXTENDED-FAILURE-INFO ::= {...}
+
+ CMCStatusInfoV2 ::= SEQUENCE {
+ cMCStatus CMCStatus,
+ bodyList SEQUENCE SIZE (1..MAX) OF
+ BodyPartReference,
+ statusString UTF8String OPTIONAL,
+ otherInfo CHOICE {
+ failInfo CMCFailInfo,
+ pendInfo PendInfo,
+ extendedFailInfo [1] SEQUENCE {
+ failInfoOID TYPE-IDENTIFIER.&id
+ ({ExtendedFailures}),
+ failInfoValue TYPE-IDENTIFIER.&Type
+ ({ExtendedFailures}
+ {@.failInfoOID})
+ }
+ } OPTIONAL
+ }
+
+ BodyPartReference ::= CHOICE {
+ bodyPartID BodyPartID,
+ bodyPartPath BodyPartPath
+ }
+
+ BodyPartPath ::= SEQUENCE SIZE (1..MAX) OF BodyPartID
+
+ -- Allow for distribution of trust anchors
+ --
+
+ cmc-trustedAnchors CMC-CONTROL ::=
+ { PublishTrustAnchors IDENTIFIED BY id-cmc-trustedAnchors }
+ id-cmc-trustedAnchors OBJECT IDENTIFIER ::= {id-cmc 26}
+
+ PublishTrustAnchors ::= SEQUENCE {
+ seqNumber INTEGER,
+ hashAlgorithm AlgorithmIdentifier{DIGEST-ALGORITHM,
+ {HashAlgorithms}},
+ anchorHashes SEQUENCE OF OCTET STRING
+ }
+
+ HashAlgorithms DIGEST-ALGORITHM ::= {
+ mda-sha1 | mda-sha256, ...
+ }
+
+ cmc-authData CMC-CONTROL ::=
+ { AuthPublish IDENTIFIED BY id-cmc-authData }
+ id-cmc-authData OBJECT IDENTIFIER ::= {id-cmc 27}
+
+ AuthPublish ::= BodyPartID
+
+ -- These two items use BodyPartList
+
+ cmc-batchRequests CMC-CONTROL ::=
+ { BodyPartList IDENTIFIED BY id-cmc-batchRequests }
+ id-cmc-batchRequests OBJECT IDENTIFIER ::= {id-cmc 28}
+
+ cmc-batchResponses CMC-CONTROL ::=
+ { BodyPartList IDENTIFIED BY id-cmc-batchResponses }
+ id-cmc-batchResponses OBJECT IDENTIFIER ::= {id-cmc 29}
+
+ BodyPartList ::= SEQUENCE SIZE (1..MAX) OF BodyPartID
+
+ cmc-publishCert CMC-CONTROL ::=
+ { CMCPublicationInfo IDENTIFIED BY id-cmc-publishCert }
+ id-cmc-publishCert OBJECT IDENTIFIER ::= {id-cmc 30}
+
+ CMCPublicationInfo ::= SEQUENCE {
+ hashAlg AlgorithmIdentifier{DIGEST-ALGORITHM,
+ {HashAlgorithms}},
+ certHashes SEQUENCE OF OCTET STRING,
+ pubInfo PKIPublicationInfo
+ }
+
+ cmc-modCertTemplate CMC-CONTROL ::=
+ { ModCertTemplate IDENTIFIED BY id-cmc-modCertTemplate }
+ id-cmc-modCertTemplate OBJECT IDENTIFIER ::= {id-cmc 31}
+
+ ModCertTemplate ::= SEQUENCE {
+ pkiDataReference BodyPartPath,
+ certReferences BodyPartList,
+ replace BOOLEAN DEFAULT TRUE,
+ certTemplate CertTemplate
+ }
+
+ -- Inform follow-on servers that one or more controls have
+ -- already been processed
+
+ cmc-controlProcessed CMC-CONTROL ::=
+ { ControlsProcessed IDENTIFIED BY id-cmc-controlProcessed }
+ id-cmc-controlProcessed OBJECT IDENTIFIER ::= {id-cmc 32}
+
+ ControlsProcessed ::= SEQUENCE {
+ bodyList SEQUENCE SIZE(1..MAX) OF BodyPartReference
+ }
+
+ -- Identity Proof control w/ algorithm agility
+
+ cmc-identityProofV2 CMC-CONTROL ::=
+ { IdentityProofV2 IDENTIFIED BY id-cmc-identityProofV2 }
+ id-cmc-identityProofV2 OBJECT IDENTIFIER ::= { id-cmc 33 }
+
+ IdentityProofV2 ::= SEQUENCE {
+ proofAlgID AlgorithmIdentifier{DIGEST-ALGORITHM,
+ {WitnessAlgs}},
+ macAlgId AlgorithmIdentifier{MAC-ALGORITHM, {POPAlgs}},
+ witness OCTET STRING
+ }
+
+ cmc-popLinkWitnessV2 CMC-CONTROL ::=
+ { PopLinkWitnessV2 IDENTIFIED BY id-cmc-popLinkWitnessV2 }
+ id-cmc-popLinkWitnessV2 OBJECT IDENTIFIER ::= { id-cmc 34 }
+
+ PopLinkWitnessV2 ::= SEQUENCE {
+ keyGenAlgorithm AlgorithmIdentifier{KEY-DERIVATION,
+ {KeyDevAlgs}},
+ macAlgorithm AlgorithmIdentifier{MAC-ALGORITHM, {POPAlgs}},
+ witness OCTET STRING
+ }
+
+ KeyDevAlgs KEY-DERIVATION ::= {kda-PBKDF2, ...}
+
+ END
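The CMC-CONTROL object set gives the same OID-to-type binding for controls: in TaggedAttribute, attrType must be the &id of some object in Cmc-Control-Set, and attrValues then carries values of that object's &Type. A minimal sketch, not part of the fixture (the value name exampleControl, the bodyPartID, and the transaction number are arbitrary):

    -- cmc-transactionId binds id-cmc-transactionId to INTEGER, so the
    -- attrValues open types must be INTEGERs for that attrType.
    exampleControl TaggedAttribute ::= {
        bodyPartID 1,
        attrType   id-cmc-transactionId,
        attrValues { INTEGER : 10001 }
    }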
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/External-References.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/External-References.asn1
index 9a7d4936a6..9a7d4936a6 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/External-References.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/External-References.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/GULSProtectionMappings.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/GULSProtectionMappings.asn1
index 9b6a426ca2..9b6a426ca2 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/GULSProtectionMappings.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/GULSProtectionMappings.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/GenericProtectingTransferSyntax.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/GenericProtectingTransferSyntax.asn1
index c59451dcdb..c59451dcdb 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/GenericProtectingTransferSyntax.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/GenericProtectingTransferSyntax.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Geo-Gr-Coding-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Geo-Gr-Coding-Attributes.asn1
index 60acbb3b5c..60acbb3b5c 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Geo-Gr-Coding-Attributes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Geo-Gr-Coding-Attributes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Geo-Gr-Presentation-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Geo-Gr-Presentation-Attributes.asn1
index 84c1ee9851..84c1ee9851 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Geo-Gr-Presentation-Attributes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Geo-Gr-Presentation-Attributes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Geo-Gr-Profile-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Geo-Gr-Profile-Attributes.asn1
index 28daa467e1..28daa467e1 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Geo-Gr-Profile-Attributes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Geo-Gr-Profile-Attributes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/GulsSecurityExchanges.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/GulsSecurityExchanges.asn1
index 336b824174..336b824174 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/GulsSecurityExchanges.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/GulsSecurityExchanges.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/GulsSecurityTransformations.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/GulsSecurityTransformations.asn1
index db2725c37d..db2725c37d 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/GulsSecurityTransformations.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/GulsSecurityTransformations.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/HierarchicalOperationalBindings.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/HierarchicalOperationalBindings.asn1
index 4e0084b079..4e0084b079 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/HierarchicalOperationalBindings.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/HierarchicalOperationalBindings.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSAbstractService.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSAbstractService.asn1
index 3fec8ae64a..3fec8ae64a 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSAbstractService.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSAbstractService.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSAutoActionTypes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSAutoActionTypes.asn1
index 8c0c8138e2..8c0c8138e2 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSAutoActionTypes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSAutoActionTypes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSExtendedBodyPartTypes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSExtendedBodyPartTypes.asn1
index 9805a6189d..9805a6189d 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSExtendedBodyPartTypes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSExtendedBodyPartTypes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSExtendedBodyPartTypes2.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSExtendedBodyPartTypes2.asn1
index b39e03c3b6..b39e03c3b6 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSExtendedBodyPartTypes2.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSExtendedBodyPartTypes2.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSExtendedVoiceBodyPartType.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSExtendedVoiceBodyPartType.asn1
index 171f4b4223..171f4b4223 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSExtendedVoiceBodyPartType.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSExtendedVoiceBodyPartType.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSFileTransferBodyPartType.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSFileTransferBodyPartType.asn1
index 59de6d1b04..59de6d1b04 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSFileTransferBodyPartType.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSFileTransferBodyPartType.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSForwardedContentBodyPartType.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSForwardedContentBodyPartType.asn1
index 57faac6587..57faac6587 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSForwardedContentBodyPartType.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSForwardedContentBodyPartType.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSForwardedReportBodyPartType.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSForwardedReportBodyPartType.asn1
index 4e46c7679b..4e46c7679b 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSForwardedReportBodyPartType.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSForwardedReportBodyPartType.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSFunctionalObjects.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSFunctionalObjects.asn1
index 09ef4de282..09ef4de282 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSFunctionalObjects.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSFunctionalObjects.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSHeadingExtensions.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSHeadingExtensions.asn1
index 752e8d05e1..752e8d05e1 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSHeadingExtensions.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSHeadingExtensions.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSInformationObjects.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSInformationObjects.asn1
index 3fb0463ee7..3fb0463ee7 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSInformationObjects.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSInformationObjects.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSMessageStoreAttributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSMessageStoreAttributes.asn1
index 719bca4987..719bca4987 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSMessageStoreAttributes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSMessageStoreAttributes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSObjectIdentifiers.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSObjectIdentifiers.asn1
index 6e5c01ab40..6e5c01ab40 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSObjectIdentifiers.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSObjectIdentifiers.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSObjectIdentifiers2.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSObjectIdentifiers2.asn1
index 2b46b27b3e..2b46b27b3e 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSObjectIdentifiers2.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSObjectIdentifiers2.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSSecurityExtensions.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSSecurityExtensions.asn1
index 8c692ccb31..8c692ccb31 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSSecurityExtensions.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSSecurityExtensions.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSUpperBounds.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSUpperBounds.asn1
index 27324f614f..27324f614f 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSUpperBounds.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSUpperBounds.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/ISO-STANDARD-9541-FONT-ATTRIBUTE-SET.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/ISO-STANDARD-9541-FONT-ATTRIBUTE-SET.asn1
index b7efd7417e..b7efd7417e 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/ISO-STANDARD-9541-FONT-ATTRIBUTE-SET.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/ISO-STANDARD-9541-FONT-ATTRIBUTE-SET.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/ISO8571-FTAM.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/ISO8571-FTAM.asn1
index a57a276704..a57a276704 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/ISO8571-FTAM.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/ISO8571-FTAM.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/ISO9541-SN.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/ISO9541-SN.asn1
index 0149602040..0149602040 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/ISO9541-SN.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/ISO9541-SN.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Identifiers-and-Expressions.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Identifiers-and-Expressions.asn1
index bd1d8d3c48..bd1d8d3c48 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Identifiers-and-Expressions.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Identifiers-and-Expressions.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/InformationFramework.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/InformationFramework.asn1
index 813ac9c6a0..813ac9c6a0 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/InformationFramework.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/InformationFramework.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Interchange-Data-Elements.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Interchange-Data-Elements.asn1
index 2c78360b7b..2c78360b7b 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Interchange-Data-Elements.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Interchange-Data-Elements.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Layout-Descriptors.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Layout-Descriptors.asn1
index 92c887bb06..92c887bb06 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Layout-Descriptors.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Layout-Descriptors.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Link-Descriptors.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Link-Descriptors.asn1
index 64fc4436e4..64fc4436e4 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Link-Descriptors.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Link-Descriptors.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Location-Expressions.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Location-Expressions.asn1
index 5de6491621..5de6491621 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Location-Expressions.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Location-Expressions.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Logical-Descriptors.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Logical-Descriptors.asn1
index fab36bf12a..fab36bf12a 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Logical-Descriptors.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Logical-Descriptors.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MHSObjectIdentifiers.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MHSObjectIdentifiers.asn1
index 187c3c8ad4..187c3c8ad4 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MHSObjectIdentifiers.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MHSObjectIdentifiers.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MHSProtocolObjectIdentifiers.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MHSProtocolObjectIdentifiers.asn1
index 40f53b9458..40f53b9458 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MHSProtocolObjectIdentifiers.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MHSProtocolObjectIdentifiers.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MSAbstractService.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MSAbstractService.asn1
index 052b3b2041..052b3b2041 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MSAbstractService.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MSAbstractService.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MSAccessProtocol.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MSAccessProtocol.asn1
index b69d72b3ed..b69d72b3ed 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MSAccessProtocol.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MSAccessProtocol.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MSGeneralAttributeTypes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MSGeneralAttributeTypes.asn1
index 99d34b2883..99d34b2883 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MSGeneralAttributeTypes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MSGeneralAttributeTypes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MSGeneralAutoActionTypes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MSGeneralAutoActionTypes.asn1
index eceae4ab44..eceae4ab44 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MSGeneralAutoActionTypes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MSGeneralAutoActionTypes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MSMatchingRules.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MSMatchingRules.asn1
index 37c894da86..37c894da86 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MSMatchingRules.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MSMatchingRules.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MSObjectIdentifiers.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MSObjectIdentifiers.asn1
index df194f838c..df194f838c 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MSObjectIdentifiers.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MSObjectIdentifiers.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MSUpperBounds.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MSUpperBounds.asn1
index 6494fbd3ef..6494fbd3ef 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MSUpperBounds.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MSUpperBounds.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MTAAbstractService.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MTAAbstractService.asn1
index 38035c77ae..38035c77ae 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MTAAbstractService.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MTAAbstractService.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MTSAbstractService.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSAbstractService.asn1
index 68a5118bc8..68a5118bc8 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MTSAbstractService.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSAbstractService.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MTSAbstractService88.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSAbstractService88.asn1
index f66d117f35..f66d117f35 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MTSAbstractService88.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSAbstractService88.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MTSAccessProtocol.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSAccessProtocol.asn1
index 03181c5951..03181c5951 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MTSAccessProtocol.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSAccessProtocol.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MTSObjectIdentifiers.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSObjectIdentifiers.asn1
index 1615b241ee..1615b241ee 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MTSObjectIdentifiers.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSObjectIdentifiers.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MTSUpperBounds.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSUpperBounds.asn1
index 10eac962cb..10eac962cb 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MTSUpperBounds.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSUpperBounds.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Notation.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Notation.asn1
index 96dfc39b6a..96dfc39b6a 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Notation.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Notation.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/OCSP-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/OCSP-2009.asn1
new file mode 100644
index 0000000000..db500fe9a1
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/OCSP-2009.asn1
@@ -0,0 +1,183 @@
+ OCSP-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-ocsp-02(48)}
+ DEFINITIONS EXPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ Extensions{}, EXTENSION, ATTRIBUTE
+ FROM PKIX-CommonTypes-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57)}
+
+ AlgorithmIdentifier{}, DIGEST-ALGORITHM, SIGNATURE-ALGORITHM
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ AuthorityInfoAccessSyntax, GeneralName, CrlEntryExtensions
+ FROM PKIX1Implicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-implicit-02(59)}
+
+ Name, CertificateSerialNumber, id-kp, id-ad-ocsp, Certificate
+ FROM PKIX1Explicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-explicit-02(51)}
+
+ sa-dsaWithSHA1, sa-rsaWithMD2, sa-rsaWithMD5, sa-rsaWithSHA1
+ FROM PKIXAlgs-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-algorithms2008-02(56)};
+
+ OCSPRequest ::= SEQUENCE {
+ tbsRequest TBSRequest,
+ optionalSignature [0] EXPLICIT Signature OPTIONAL }
+
+ TBSRequest ::= SEQUENCE {
+ version [0] EXPLICIT Version DEFAULT v1,
+ requestorName [1] EXPLICIT GeneralName OPTIONAL,
+ requestList SEQUENCE OF Request,
+ requestExtensions [2] EXPLICIT Extensions {{re-ocsp-nonce |
+ re-ocsp-response, ...}} OPTIONAL }
+
+ Signature ::= SEQUENCE {
+ signatureAlgorithm AlgorithmIdentifier
+ { SIGNATURE-ALGORITHM, {...}},
+ signature BIT STRING,
+ certs [0] EXPLICIT SEQUENCE OF Certificate OPTIONAL }
+
+ Version ::= INTEGER { v1(0) }
+
+ Request ::= SEQUENCE {
+ reqCert CertID,
+ singleRequestExtensions [0] EXPLICIT Extensions
+ { {re-ocsp-service-locator,
+ ...}} OPTIONAL }
+
+ CertID ::= SEQUENCE {
+ hashAlgorithm AlgorithmIdentifier
+ {DIGEST-ALGORITHM, {...}},
+ issuerNameHash OCTET STRING, -- Hash of Issuer's DN
+ issuerKeyHash OCTET STRING, -- Hash of Issuer's public key
+ serialNumber CertificateSerialNumber }
+
+ OCSPResponse ::= SEQUENCE {
+ responseStatus OCSPResponseStatus,
+ responseBytes [0] EXPLICIT ResponseBytes OPTIONAL }
+
+ OCSPResponseStatus ::= ENUMERATED {
+ successful (0), --Response has valid confirmations
+ malformedRequest (1), --Illegal confirmation request
+ internalError (2), --Internal error in issuer
+ tryLater (3), --Try again later
+ -- (4) is not used
+ sigRequired (5), --Must sign the request
+ unauthorized (6) --Request unauthorized
+ }
+
+ RESPONSE ::= TYPE-IDENTIFIER
+
+ ResponseSet RESPONSE ::= {basicResponse, ...}
+
+ ResponseBytes ::= SEQUENCE {
+ responseType RESPONSE.
+ &id ({ResponseSet}),
+ response OCTET STRING (CONTAINING RESPONSE.
+ &Type({ResponseSet}{@responseType}))}
+
+ basicResponse RESPONSE ::=
+ { BasicOCSPResponse IDENTIFIED BY id-pkix-ocsp-basic }
+
+ BasicOCSPResponse ::= SEQUENCE {
+ tbsResponseData ResponseData,
+ signatureAlgorithm AlgorithmIdentifier{SIGNATURE-ALGORITHM,
+ {sa-dsaWithSHA1 | sa-rsaWithSHA1 |
+ sa-rsaWithMD5 | sa-rsaWithMD2, ...}},
+ signature BIT STRING,
+ certs [0] EXPLICIT SEQUENCE OF Certificate OPTIONAL }
+
+ ResponseData ::= SEQUENCE {
+ version [0] EXPLICIT Version DEFAULT v1,
+ responderID ResponderID,
+ producedAt GeneralizedTime,
+ responses SEQUENCE OF SingleResponse,
+ responseExtensions [1] EXPLICIT Extensions
+ {{re-ocsp-nonce, ...}} OPTIONAL }
+
+ ResponderID ::= CHOICE {
+ byName [1] Name,
+ byKey [2] KeyHash }
+
+ KeyHash ::= OCTET STRING --SHA-1 hash of responder's public key
+ -- (excluding the tag and length fields)
+
+ SingleResponse ::= SEQUENCE {
+ certID CertID,
+ certStatus CertStatus,
+ thisUpdate GeneralizedTime,
+ nextUpdate [0] EXPLICIT GeneralizedTime OPTIONAL,
+ singleExtensions [1] EXPLICIT Extensions{{re-ocsp-crl |
+ re-ocsp-archive-cutoff |
+ CrlEntryExtensions, ...}
+ } OPTIONAL }
+
+ CertStatus ::= CHOICE {
+ good [0] IMPLICIT NULL,
+ revoked [1] IMPLICIT RevokedInfo,
+ unknown [2] IMPLICIT UnknownInfo }
+
+ RevokedInfo ::= SEQUENCE {
+ revocationTime GeneralizedTime,
+ revocationReason [0] EXPLICIT CRLReason OPTIONAL }
+
+ UnknownInfo ::= NULL
+
+ CRLReason ::= INTEGER
+
+ ArchiveCutoff ::= GeneralizedTime
+
+ AcceptableResponses ::= SEQUENCE OF RESPONSE.&id({ResponseSet})
+
+ ServiceLocator ::= SEQUENCE {
+ issuer Name,
+ locator AuthorityInfoAccessSyntax }
+
+ CrlID ::= SEQUENCE {
+ crlUrl [0] EXPLICIT IA5String OPTIONAL,
+ crlNum [1] EXPLICIT INTEGER OPTIONAL,
+ crlTime [2] EXPLICIT GeneralizedTime OPTIONAL }
+
+ -- Request Extensions
+
+ re-ocsp-nonce EXTENSION ::= { SYNTAX OCTET STRING IDENTIFIED
+ BY id-pkix-ocsp-nonce }
+ re-ocsp-response EXTENSION ::= { SYNTAX AcceptableResponses IDENTIFIED
+ BY id-pkix-ocsp-response }
+ re-ocsp-service-locator EXTENSION ::= { SYNTAX ServiceLocator
+ IDENTIFIED BY
+ id-pkix-ocsp-service-locator }
+
+ -- Response Extensions
+
+ re-ocsp-crl EXTENSION ::= { SYNTAX CrlID IDENTIFIED BY
+ id-pkix-ocsp-crl }
+ re-ocsp-archive-cutoff EXTENSION ::= { SYNTAX ArchiveCutoff
+ IDENTIFIED BY
+ id-pkix-ocsp-archive-cutoff }
+
+ -- Object Identifiers
+
+ id-kp-OCSPSigning OBJECT IDENTIFIER ::= { id-kp 9 }
+ id-pkix-ocsp OBJECT IDENTIFIER ::= id-ad-ocsp
+ id-pkix-ocsp-basic OBJECT IDENTIFIER ::= { id-pkix-ocsp 1 }
+ id-pkix-ocsp-nonce OBJECT IDENTIFIER ::= { id-pkix-ocsp 2 }
+ id-pkix-ocsp-crl OBJECT IDENTIFIER ::= { id-pkix-ocsp 3 }
+ id-pkix-ocsp-response OBJECT IDENTIFIER ::= { id-pkix-ocsp 4 }
+ id-pkix-ocsp-nocheck OBJECT IDENTIFIER ::= { id-pkix-ocsp 5 }
+ id-pkix-ocsp-archive-cutoff OBJECT IDENTIFIER ::= { id-pkix-ocsp 6 }
+ id-pkix-ocsp-service-locator OBJECT IDENTIFIER ::= { id-pkix-ocsp 7 }
+
+ END
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/PKCS7.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/OLD-PKCS7.asn1
index ac449b59c7..ab555200bb 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/PKCS7.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/OLD-PKCS7.asn1
@@ -3,7 +3,7 @@
-- This Annex contains a module of PKCS#7 ASN.1 definitions conforming to current ASN.1 standards rather than the obsolescent (and now deprecated) 1988/90 version of ASN.1 used in version 1.5 of PKCS#7.
-- Extensions to PKCS#7 defined in RFC 2630 are included.
-- If differences are found between the ASN.1 in the following module and that in PKCS#7, the latter is definitive.
-PKCS7 {iso member-body usa(840) rsadsi(113549) pkcs(1) 7
+OLD-PKCS7 {iso member-body usa(840) rsadsi(113549) pkcs(1) 7
module(0) -- module not currently defined in PKCS#7 --} DEFINITIONS IMPLICIT
TAGS ::=
BEGIN
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/ObjectIdentifiers.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/ObjectIdentifiers.asn1
index b4f91f50c5..b4f91f50c5 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/ObjectIdentifiers.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/ObjectIdentifiers.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/OperationalBindingManagement.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/OperationalBindingManagement.asn1
index 2044feb155..2044feb155 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/OperationalBindingManagement.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/OperationalBindingManagement.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-10.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-10.asn1
new file mode 100644
index 0000000000..a5fd0fefb9
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-10.asn1
@@ -0,0 +1,56 @@
+ PKCS-10
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkcs10-2009(69)}
+ DEFINITIONS IMPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ AlgorithmIdentifier{}, DIGEST-ALGORITHM, SIGNATURE-ALGORITHM,
+ PUBLIC-KEY
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ ATTRIBUTE, Name
+ FROM PKIX1Explicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-explicit-02(51)};
+
+ -- Certificate requests
+ CertificationRequestInfo ::= SEQUENCE {
+ version INTEGER { v1(0) } (v1, ... ),
+ subject Name,
+ subjectPKInfo SubjectPublicKeyInfo{{ PKInfoAlgorithms }},
+ attributes [0] Attributes{{ CRIAttributes }}
+ }
+
+ SubjectPublicKeyInfo {PUBLIC-KEY: IOSet} ::= SEQUENCE {
+ algorithm AlgorithmIdentifier {PUBLIC-KEY, {IOSet}},
+ subjectPublicKey BIT STRING
+ }
+
+ PKInfoAlgorithms PUBLIC-KEY ::= {
+ ... -- add any locally defined algorithms here -- }
+
+ Attributes { ATTRIBUTE:IOSet } ::= SET OF Attribute{{ IOSet }}
+
+ CRIAttributes ATTRIBUTE ::= {
+ ... -- add any locally defined attributes here -- }
+
+ Attribute { ATTRIBUTE:IOSet } ::= SEQUENCE {
+ type ATTRIBUTE.&id({IOSet}),
+ values SET SIZE(1..MAX) OF ATTRIBUTE.&Type({IOSet}{@type})
+ }
+
+ CertificationRequest ::= SEQUENCE {
+ certificationRequestInfo CertificationRequestInfo,
+ signatureAlgorithm AlgorithmIdentifier{SIGNATURE-ALGORITHM,
+ { SignatureAlgorithms }},
+ signature BIT STRING
+ }
+
+ SignatureAlgorithms SIGNATURE-ALGORITHM ::= {
+ ... -- add any locally defined algorithms here -- }
+
+ END
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-12.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-12.asn1
new file mode 100644
index 0000000000..5b37a552f9
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-12.asn1
@@ -0,0 +1,174 @@
+PKCS-12 {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1)
+ pkcs-12(12) modules(0) pkcs-12(1)}
+
+-- $Revision$
+
+DEFINITIONS IMPLICIT TAGS ::=
+
+BEGIN
+
+-- EXPORTS ALL
+-- All types and values defined in this module is exported for use in
+-- other ASN.1 modules.
+
+IMPORTS
+
+informationFramework
+ FROM UsefulDefinitions {joint-iso-itu-t(2) ds(5) module(1)
+ usefulDefinitions(0) 3}
+
+ATTRIBUTE
+ FROM InformationFramework informationFramework
+
+ContentInfo, DigestInfo
+ FROM PKCS-7 {iso(1) member-body(2) us(840) rsadsi(113549)
+ pkcs(1) pkcs-7(7) modules(0) pkcs-7(1)}
+
+PrivateKeyInfo, EncryptedPrivateKeyInfo
+ FROM PKCS-8 {iso(1) member-body(2) us(840) rsadsi(113549)
+ pkcs(1) pkcs-8(8) modules(1) pkcs-8(1)}
+
+pkcs-9, friendlyName, localKeyId, certTypes, crlTypes
+ FROM PKCS-9 {iso(1) member-body(2) us(840) rsadsi(113549)
+ pkcs(1) pkcs-9(9) modules(0) pkcs-9(1)};
+
+-- Object identifiers
+
+rsadsi OBJECT IDENTIFIER ::= {iso(1) member-body(2) us(840) rsadsi(113549)}
+pkcs OBJECT IDENTIFIER ::= {rsadsi pkcs(1)}
+pkcs-12 OBJECT IDENTIFIER ::= {pkcs 12}
+pkcs-12PbeIds OBJECT IDENTIFIER ::= {pkcs-12 1}
+pbeWithSHAAnd128BitRC4 OBJECT IDENTIFIER ::= {pkcs-12PbeIds 1}
+pbeWithSHAAnd40BitRC4 OBJECT IDENTIFIER ::= {pkcs-12PbeIds 2}
+pbeWithSHAAnd3-KeyTripleDES-CBC OBJECT IDENTIFIER ::= {pkcs-12PbeIds 3}
+pbeWithSHAAnd2-KeyTripleDES-CBC OBJECT IDENTIFIER ::= {pkcs-12PbeIds 4}
+pbeWithSHAAnd128BitRC2-CBC OBJECT IDENTIFIER ::= {pkcs-12PbeIds 5}
+pbewithSHAAnd40BitRC2-CBC OBJECT IDENTIFIER ::= {pkcs-12PbeIds 6}
+
+bagtypes OBJECT IDENTIFIER ::= {pkcs-12 10 1}
+
+-- The PFX PDU
+
+PFX ::= SEQUENCE {
+ version INTEGER {v3(3)}(v3,...),
+ authSafe ContentInfo,
+ macData MacData OPTIONAL
+}
+
+MacData ::= SEQUENCE {
+ mac DigestInfo,
+ macSalt OCTET STRING,
+ iterations INTEGER DEFAULT 1
+-- Note: The default is for historical reasons and its use is
+-- deprecated. A higher value, like 1024 is recommended.
+}
+
+AuthenticatedSafe ::= SEQUENCE OF ContentInfo
+ -- Data if unencrypted
+ -- EncryptedData if password-encrypted
+ -- EnvelopedData if public key-encrypted
+
+SafeContents ::= SEQUENCE OF SafeBag
+
+SafeBag ::= SEQUENCE {
+ bagId BAG-TYPE.&id ({PKCS12BagSet}),
+ bagValue [0] EXPLICIT BAG-TYPE.&Type({PKCS12BagSet}{@bagId}),
+ bagAttributes SET OF PKCS12Attribute OPTIONAL
+}
+
+-- Bag types
+
+keyBag BAG-TYPE ::=
+ {KeyBag IDENTIFIED BY {bagtypes 1}}
+pkcs8ShroudedKeyBag BAG-TYPE ::=
+ {PKCS8ShroudedKeyBag IDENTIFIED BY {bagtypes 2}}
+certBag BAG-TYPE ::=
+ {CertBag IDENTIFIED BY {bagtypes 3}}
+crlBag BAG-TYPE ::=
+ {CRLBag IDENTIFIED BY {bagtypes 4}}
+secretBag BAG-TYPE ::=
+ {SecretBag IDENTIFIED BY {bagtypes 5}}
+safeContentsBag BAG-TYPE ::=
+ {SafeContents IDENTIFIED BY {bagtypes 6}}
+
+PKCS12BagSet BAG-TYPE ::= {
+ keyBag |
+ pkcs8ShroudedKeyBag |
+ certBag |
+ crlBag |
+ secretBag |
+ safeContentsBag,
+ ... -- For future extensions
+}
+
+BAG-TYPE ::= TYPE-IDENTIFIER
+
+-- KeyBag
+
+KeyBag ::= PrivateKeyInfo
+
+-- Shrouded KeyBag
+
+PKCS8ShroudedKeyBag ::= EncryptedPrivateKeyInfo
+
+-- CertBag
+
+CertBag ::= SEQUENCE {
+ certId BAG-TYPE.&id ({CertTypes}),
+ certValue [0] EXPLICIT BAG-TYPE.&Type ({CertTypes}{@certId})
+}
+
+x509Certificate BAG-TYPE ::=
+ {OCTET STRING IDENTIFIED BY {certTypes 1}}
+ -- DER-encoded X.509 certificate stored in OCTET STRING
+sdsiCertificate BAG-TYPE ::=
+ {IA5String IDENTIFIED BY {certTypes 2}}
+ -- Base64-encoded SDSI certificate stored in IA5String
+
+CertTypes BAG-TYPE ::= {
+ x509Certificate |
+ sdsiCertificate,
+ ... -- For future extensions
+}
+
+-- CRLBag
+
+CRLBag ::= SEQUENCE {
+ crlId BAG-TYPE.&id ({CRLTypes}),
+ crlValue [0] EXPLICIT BAG-TYPE.&Type ({CRLTypes}{@crlId})
+}
+
+x509CRL BAG-TYPE ::=
+ {OCTET STRING IDENTIFIED BY {crlTypes 1}}
+ -- DER-encoded X.509 CRL stored in OCTET STRING
+
+CRLTypes BAG-TYPE ::= {
+ x509CRL,
+ ... -- For future extensions
+}
+
+-- Secret Bag
+
+SecretBag ::= SEQUENCE {
+ secretTypeId BAG-TYPE.&id ({SecretTypes}),
+ secretValue [0] EXPLICIT BAG-TYPE.&Type ({SecretTypes}{@secretTypeId})
+}
+
+SecretTypes BAG-TYPE ::= {
+ ... -- For future extensions
+}
+
+-- Attributes
+
+PKCS12Attribute ::= SEQUENCE {
+ attrId ATTRIBUTE.&id ({PKCS12AttrSet}),
+ attrValues SET OF ATTRIBUTE.&Type ({PKCS12AttrSet}{@attrId})
+} -- This type is compatible with the X.500 type 'Attribute'
+
+PKCS12AttrSet ATTRIBUTE ::= {
+ friendlyName |
+ localKeyId,
+ ... -- Other attributes are allowed
+}
+
+END
\ No newline at end of file
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-5.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-5.asn1
new file mode 100644
index 0000000000..91b0dc36bf
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-5.asn1
@@ -0,0 +1,202 @@
+-- PKCS #5 v2.1 ASN.1 Module
+-- Revised October 27, 2012
+
+-- This module has been checked for conformance with the
+-- ASN.1 standard by the OSS ASN.1 Tools
+
+PKCS-5 {
+ iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-5(5) modules(16)
+ pkcs5v2-1(2)}
+
+DEFINITIONS EXPLICIT TAGS ::=
+
+BEGIN
+
+-- ============================
+-- Basic object identifiers
+-- ============================
+
+nistAlgorithms OBJECT IDENTIFIER ::=
+ {joint-iso-itu-t(2) country(16) us(840) organization(1) gov(101) csor(3) 4}
+oiw OBJECT IDENTIFIER ::= {iso(1) identified-organization(3) 14}
+rsadsi OBJECT IDENTIFIER ::= {iso(1) member-body(2) us(840) 113549}
+pkcs OBJECT IDENTIFIER ::= {rsadsi 1}
+pkcs-5 OBJECT IDENTIFIER ::= {pkcs 5}
+
+
+-- ============================
+-- Basic types and classes
+-- ============================
+
+AlgorithmIdentifier { ALGORITHM-IDENTIFIER:InfoObjectSet } ::= SEQUENCE {
+ algorithm ALGORITHM-IDENTIFIER.&id({InfoObjectSet}),
+ parameters ALGORITHM-IDENTIFIER.&Type({InfoObjectSet} {@algorithm}) OPTIONAL
+}
+
+ALGORITHM-IDENTIFIER ::= TYPE-IDENTIFIER
+
+
+-- ============================
+-- PBKDF2
+-- ============================
+
+PBKDF2Algorithms ALGORITHM-IDENTIFIER ::=
+ { {PBKDF2-params IDENTIFIED BY id-PBKDF2}, ...}
+
+id-PBKDF2 OBJECT IDENTIFIER ::= {pkcs-5 12}
+
+algid-hmacWithSHA1 AlgorithmIdentifier {{PBKDF2-PRFs}} ::=
+ {algorithm id-hmacWithSHA1, parameters NULL : NULL}
+
+PBKDF2-params ::= SEQUENCE {
+ salt CHOICE {
+ specified OCTET STRING,
+ otherSource AlgorithmIdentifier {{PBKDF2-SaltSources}}
+ },
+ iterationCount INTEGER (1..MAX),
+ keyLength INTEGER (1..MAX) OPTIONAL,
+ prf AlgorithmIdentifier {{PBKDF2-PRFs}} DEFAULT algid-hmacWithSHA1
+}
+
+PBKDF2-SaltSources ALGORITHM-IDENTIFIER ::= { ... }
+
+PBKDF2-PRFs ALGORITHM-IDENTIFIER ::= {
+ {NULL IDENTIFIED BY id-hmacWithSHA1} |
+ {NULL IDENTIFIED BY id-hmacWithSHA224} |
+ {NULL IDENTIFIED BY id-hmacWithSHA256} |
+ {NULL IDENTIFIED BY id-hmacWithSHA384} |
+ {NULL IDENTIFIED BY id-hmacWithSHA512} |
+ {NULL IDENTIFIED BY id-hmacWithSHA512-224} |
+ {NULL IDENTIFIED BY id-hmacWithSHA512-256},
+ ...
+}
+
+
+-- ============================
+ -- PBES1
+-- ============================
+
+PBES1Algorithms ALGORITHM-IDENTIFIER ::= {
+ {PBEParameter IDENTIFIED BY pbeWithMD2AndDES-CBC} |
+ {PBEParameter IDENTIFIED BY pbeWithMD2AndRC2-CBC} |
+ {PBEParameter IDENTIFIED BY pbeWithMD5AndDES-CBC} |
+ {PBEParameter IDENTIFIED BY pbeWithMD5AndRC2-CBC} |
+ {PBEParameter IDENTIFIED BY pbeWithSHA1AndDES-CBC} |
+ {PBEParameter IDENTIFIED BY pbeWithSHA1AndRC2-CBC},
+ ...
+}
+
+pbeWithMD2AndDES-CBC OBJECT IDENTIFIER ::= {pkcs-5 1}
+pbeWithMD2AndRC2-CBC OBJECT IDENTIFIER ::= {pkcs-5 4}
+pbeWithMD5AndDES-CBC OBJECT IDENTIFIER ::= {pkcs-5 3}
+pbeWithMD5AndRC2-CBC OBJECT IDENTIFIER ::= {pkcs-5 6}
+pbeWithSHA1AndDES-CBC OBJECT IDENTIFIER ::= {pkcs-5 10}
+pbeWithSHA1AndRC2-CBC OBJECT IDENTIFIER ::= {pkcs-5 11}
+
+PBEParameter ::= SEQUENCE {
+ salt OCTET STRING (SIZE(8)),
+ iterationCount INTEGER
+}
+
+
+-- ============================
+-- PBES2
+-- ============================
+
+PBES2Algorithms ALGORITHM-IDENTIFIER ::= {
+ {PBES2-params IDENTIFIED BY id-PBES2},
+ ...
+}
+
+id-PBES2 OBJECT IDENTIFIER ::= {pkcs-5 13}
+
+PBES2-params ::= SEQUENCE {
+ keyDerivationFunc AlgorithmIdentifier {{PBES2-KDFs}},
+ encryptionScheme AlgorithmIdentifier {{PBES2-Encs}}
+}
+
+PBES2-KDFs ALGORITHM-IDENTIFIER ::= {
+ {PBKDF2-params IDENTIFIED BY id-PBKDF2},
+ ...
+}
+
+PBES2-Encs ALGORITHM-IDENTIFIER ::= { ... }
+
+
+-- ============================
+-- PBMAC1
+-- ============================
+
+PBMAC1Algorithms ALGORITHM-IDENTIFIER ::= {
+ {PBMAC1-params IDENTIFIED BY id-PBMAC1},
+ ...
+}
+
+id-PBMAC1 OBJECT IDENTIFIER ::= {pkcs-5 14}
+
+PBMAC1-params ::= SEQUENCE {
+ keyDerivationFunc AlgorithmIdentifier {{PBMAC1-KDFs}},
+ messageAuthScheme AlgorithmIdentifier {{PBMAC1-MACs}}
+}
+
+PBMAC1-KDFs ALGORITHM-IDENTIFIER ::= {
+ {PBKDF2-params IDENTIFIED BY id-PBKDF2},
+ ...
+}
+
+PBMAC1-MACs ALGORITHM-IDENTIFIER ::= { ... }
+
+-- ============================
+-- Supporting techniques
+-- ============================
+
+digestAlgorithm OBJECT IDENTIFIER ::= {rsadsi 2}
+encryptionAlgorithm OBJECT IDENTIFIER ::= {rsadsi 3}
+
+SupportingAlgorithms ALGORITHM-IDENTIFIER ::= {
+ {NULL IDENTIFIED BY id-hmacWithSHA1} |
+ {OCTET STRING (SIZE(8)) IDENTIFIED BY desCBC} |
+ {OCTET STRING (SIZE(8)) IDENTIFIED BY des-EDE3-CBC} |
+ {RC2-CBC-Parameter IDENTIFIED BY rc2CBC} |
+ {RC5-CBC-Parameters IDENTIFIED BY rc5-CBC-PAD} |
+ {OCTET STRING (SIZE(16)) IDENTIFIED BY aes128-CBC-PAD} |
+ {OCTET STRING (SIZE(16)) IDENTIFIED BY aes192-CBC-PAD} |
+ {OCTET STRING (SIZE(16)) IDENTIFIED BY aes256-CBC-PAD},
+ ...
+}
+
+id-hmacWithSHA1 OBJECT IDENTIFIER ::= {digestAlgorithm 7}
+id-hmacWithSHA224 OBJECT IDENTIFIER ::= {digestAlgorithm 8}
+id-hmacWithSHA256 OBJECT IDENTIFIER ::= {digestAlgorithm 9}
+id-hmacWithSHA384 OBJECT IDENTIFIER ::= {digestAlgorithm 10}
+id-hmacWithSHA512 OBJECT IDENTIFIER ::= {digestAlgorithm 11}
+id-hmacWithSHA512-224 OBJECT IDENTIFIER ::= {digestAlgorithm 12}
+id-hmacWithSHA512-256 OBJECT IDENTIFIER ::= {digestAlgorithm 13}
+
+-- from OIW
+desCBC OBJECT IDENTIFIER ::= {oiw secsig(3) algorithms(2) 7}
+
+des-EDE3-CBC OBJECT IDENTIFIER ::= {encryptionAlgorithm 7}
+
+rc2CBC OBJECT IDENTIFIER ::= {encryptionAlgorithm 2}
+
+RC2-CBC-Parameter ::= SEQUENCE {
+ rc2ParameterVersion INTEGER OPTIONAL,
+ iv OCTET STRING (SIZE(8))
+}
+
+rc5-CBC-PAD OBJECT IDENTIFIER ::= {encryptionAlgorithm 9}
+
+RC5-CBC-Parameters ::= SEQUENCE {
+ version INTEGER {v1-0(16)} (v1-0),
+ rounds INTEGER (8..127),
+ blockSizeInBits INTEGER (64 | 128),
+ iv OCTET STRING OPTIONAL
+}
+
+aes OBJECT IDENTIFIER ::= { nistAlgorithms 1 }
+aes128-CBC-PAD OBJECT IDENTIFIER ::= { aes 2 }
+aes192-CBC-PAD OBJECT IDENTIFIER ::= { aes 22 }
+aes256-CBC-PAD OBJECT IDENTIFIER ::= { aes 42 }
+
+END
\ No newline at end of file
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-7.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-7.asn1
new file mode 100644
index 0000000000..4cea8db240
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-7.asn1
@@ -0,0 +1,326 @@
+PKCS-7 {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-7(7)
+ modules(0) pkcs-7(1)}
+
+DEFINITIONS EXPLICIT TAGS ::=
+BEGIN
+
+--
+-- 3. Definitions
+--
+
+-- EXPORTS All;
+
+IMPORTS
+
+informationFramework, authenticationFramework
+ FROM UsefulDefinitions {joint-iso-itu-t ds(5) module(1)
+ usefulDefinitions(0) 3}
+
+ Name, ATTRIBUTE
+ FROM InformationFramework informationFramework
+
+ ALGORITHM, Certificate, CertificateSerialNumber,
+ CertificateList
+ FROM AuthenticationFramework authenticationFramework
+
+ contentType, messageDigest, signingTime, counterSignature
+ FROM PKCS-9 {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1)
+ pkcs-9(9) modules(0) pkcs-9(1)};
+--
+-- 6. Useful types
+--
+
+-- Also defined in X.509
+-- Redeclared here as a parameterized type
+AlgorithmIdentifier {ALGORITHM:IOSet} ::= SEQUENCE {
+ algorithm ALGORITHM.&id({IOSet}),
+ parameters ALGORITHM.&Type({IOSet}{@algorithm}) OPTIONAL
+}
+
+-- Also defined in X.501
+-- Redeclared here as a parameterized type
+Attribute { ATTRIBUTE:IOSet } ::= SEQUENCE {
+ type ATTRIBUTE.&id({IOSet}),
+ values SET SIZE (1..MAX) OF ATTRIBUTE.&Type({IOSet}{@type})
+}
+
+CertificateRevocationLists ::=
+ SET OF CertificateList
+
+Certificates ::=
+ SEQUENCE OF Certificate
+
+CRLSequence ::=
+ SEQUENCE OF CertificateList
+
+ContentEncryptionAlgorithmIdentifier ::=
+ AlgorithmIdentifier {{ContentEncryptionAlgorithms}}
+
+ContentEncryptionAlgorithms ALGORITHM ::= {
+ ... -- add any application-specific algorithms here
+}
+
+DigestAlgorithmIdentifier ::=
+ AlgorithmIdentifier {{DigestAlgorithms}}
+
+DigestAlgorithms ALGORITHM ::= {
+ ... -- add any application-specific algorithms here
+}
+
+DigestEncryptionAlgorithmIdentifier ::=
+ AlgorithmIdentifier {{DigestEncryptionAlgorithms}}
+
+DigestEncryptionAlgorithms ALGORITHM ::= {
+ ... -- add any application-specific algorithms here
+}
+
+ExtendedCertificateOrCertificate ::= CHOICE {
+ certificate Certificate, -- X.509
+ extendedCertificate [0] IMPLICIT ExtendedCertificate -- PKCS#6
+}
+
+ExtendedCertificate ::= Certificate -- cheating
+
+ExtendedCertificatesAndCertificates ::=
+ SET OF ExtendedCertificateOrCertificate
+
+IssuerAndSerialNumber ::= SEQUENCE {
+ issuer Name,
+ serialNumber CertificateSerialNumber
+}
+
+KeyEncryptionAlgorithmIdentifier ::=
+ AlgorithmIdentifier {{KeyEncryptionAlgorithms}}
+
+KeyEncryptionAlgorithms ALGORITHM ::= {
+ ... -- add any application-specific algorithms here
+}
+
+--
+-- 7. General syntax
+--
+
+ContentInfo ::= SEQUENCE {
+ contentType ContentType,
+ content [0] EXPLICIT CONTENTS.&Type({Contents}{@contentType})
+OPTIONAL
+}
+
+CONTENTS ::= TYPE-IDENTIFIER
+
+Contents CONTENTS ::= {
+ {Data IDENTIFIED BY data} |
+ {SignedData IDENTIFIED BY signedData} |
+ {EnvelopedData IDENTIFIED BY envelopedData} |
+ {SignedAndEnvelopedData IDENTIFIED BY signedAndEnvelopedData} |
+ {DigestedData IDENTIFIED BY digestedData} |
+ {EncryptedData IDENTIFIED BY encryptedData},
+ ... -- add any application-specific types/contents here
+}
+
+ContentType ::= CONTENTS.&id({Contents})
+
+--
+-- 8. Data content type
+--
+
+Data ::= OCTET STRING
+
+--
+-- 9. Signed-data content type
+--
+
+SignedData ::= SEQUENCE {
+ version INTEGER {sdVer1(1), sdVer2(2)} (sdVer1 | sdVer2),
+ digestAlgorithms
+ DigestAlgorithmIdentifiers,
+ contentInfo ContentInfo,
+ certificates CHOICE {
+ certSet [0] IMPLICIT ExtendedCertificatesAndCertificates,
+ certSequence [2] IMPLICIT Certificates
+ } OPTIONAL,
+ crls CHOICE {
+ crlSet [1] IMPLICIT CertificateRevocationLists,
+ crlSequence [3] IMPLICIT CRLSequence
+ } OPTIONAL,
+ signerInfos SignerInfos
+} (WITH COMPONENTS { ..., version (sdVer1),
+ digestAlgorithms (WITH COMPONENTS { ..., daSet PRESENT }),
+ certificates (WITH COMPONENTS { ..., certSequence ABSENT }),
+ crls (WITH COMPONENTS { ..., crlSequence ABSENT }),
+ signerInfos (WITH COMPONENTS { ..., siSet PRESENT })
+ } |
+ WITH COMPONENTS { ..., version (sdVer2),
+ digestAlgorithms (WITH COMPONENTS { ..., daSequence PRESENT }),
+ certificates (WITH COMPONENTS { ..., certSet ABSENT }),
+ crls (WITH COMPONENTS { ..., crlSet ABSENT }),
+ signerInfos (WITH COMPONENTS { ..., siSequence PRESENT })
+})
+
+SignerInfos ::= CHOICE {
+ siSet SET OF SignerInfo,
+ siSequence SEQUENCE OF SignerInfo
+}
+
+DigestAlgorithmIdentifiers ::= CHOICE {
+ daSet SET OF DigestAlgorithmIdentifier,
+ daSequence SEQUENCE OF DigestAlgorithmIdentifier
+}
+
+SignerInfo ::= SEQUENCE {
+ version INTEGER {siVer1(1), siVer2(2)} (siVer1 | siVer2),
+ issuerAndSerialNumber
+ IssuerAndSerialNumber,
+ digestAlgorithm DigestAlgorithmIdentifier,
+ authenticatedAttributes CHOICE {
+ aaSet [0] IMPLICIT SET OF Attribute {{Authenticated}},
+ aaSequence [2] EXPLICIT SEQUENCE OF Attribute {{Authenticated}}
+ -- Explicit because easier to compute digest on sequence of attributes and then reuse
+ -- encoded sequence in aaSequence.
+ } OPTIONAL,
+ digestEncryptionAlgorithm
+ DigestEncryptionAlgorithmIdentifier,
+ encryptedDigest EncryptedDigest,
+ unauthenticatedAttributes CHOICE {
+ uaSet [1] IMPLICIT SET OF Attribute {{Unauthenticated}},
+ uaSequence [3] IMPLICIT SEQUENCE OF Attribute {{Unauthenticated}}
+ } OPTIONAL
+} (WITH COMPONENTS { ..., version (siVer1),
+ authenticatedAttributes (WITH COMPONENTS { ..., aaSequence ABSENT }),
+ unauthenticatedAttributes (WITH COMPONENTS { ..., uaSequence ABSENT })
+} | WITH COMPONENTS { ..., version (siVer2),
+ authenticatedAttributes (WITH COMPONENTS { ..., aaSet ABSENT }),
+ unauthenticatedAttributes (WITH COMPONENTS { ..., uaSet ABSENT })
+})
+
+Authenticated ATTRIBUTE ::= {
+ contentType |
+ messageDigest,
+ ..., -- add application-specific attributes here
+ signingTime
+}
+
+Unauthenticated ATTRIBUTE ::= {
+ ..., -- add application-specific attributes here
+ counterSignature
+}
+
+EncryptedDigest ::= OCTET STRING
+
+DigestInfo ::= SEQUENCE {
+ digestAlgorithm DigestAlgorithmIdentifier,
+ digest Digest
+}
+
+Digest ::= OCTET STRING
+
+--
+-- 10. Enveloped-data content type
+--
+
+EnvelopedData ::= SEQUENCE {
+ version INTEGER {edVer0(0), edVer1(1)} (edVer0 | edVer1),
+ recipientInfos RecipientInfos,
+ encryptedContentInfo
+ EncryptedContentInfo
+} (WITH COMPONENTS { ..., version (edVer0),
+ recipientInfos (WITH COMPONENTS { ..., riSet PRESENT })
+} | WITH COMPONENTS { ..., version (edVer1),
+ recipientInfos (WITH COMPONENTS { ..., riSequence PRESENT })
+})
+
+RecipientInfos ::= CHOICE {
+ riSet SET OF RecipientInfo,
+ riSequence SEQUENCE OF RecipientInfo
+}
+
+EncryptedContentInfo ::= SEQUENCE {
+ contentType ContentType,
+ contentEncryptionAlgorithm
+ ContentEncryptionAlgorithmIdentifier,
+ encryptedContent
+ [0] IMPLICIT EncryptedContent OPTIONAL
+}
+
+EncryptedContent ::= OCTET STRING
+
+RecipientInfo ::= SEQUENCE {
+ version INTEGER {riVer0(0)} (riVer0),
+ issuerAndSerialNumber
+ IssuerAndSerialNumber,
+ keyEncryptionAlgorithm
+ KeyEncryptionAlgorithmIdentifier,
+ encryptedKey EncryptedKey
+}
+
+EncryptedKey ::= OCTET STRING
+
+--
+-- 11. Signed-and-enveloped-data content type
+--
+
+SignedAndEnvelopedData ::= SEQUENCE {
+ version INTEGER {seVer1(1), seVer2(2)} (seVer1 | seVer2),
+ recipientInfos RecipientInfos,
+ digestAlgorithms
+ DigestAlgorithmIdentifiers,
+ encryptedContentInfo
+ EncryptedContentInfo,
+ certificates CHOICE {
+ certSet [0] IMPLICIT ExtendedCertificatesAndCertificates,
+ certSequence [2] IMPLICIT Certificates
+ } OPTIONAL,
+ crls CHOICE {
+ crlSet [1] IMPLICIT CertificateRevocationLists,
+ crlSequence [3] IMPLICIT CRLSequence
+ } OPTIONAL,
+ signerInfos SignerInfos
+} (WITH COMPONENTS { ..., version (seVer1),
+ recipientInfos (WITH COMPONENTS { ..., riSet PRESENT }),
+ digestAlgorithms (WITH COMPONENTS { ..., daSet PRESENT }),
+ certificates (WITH COMPONENTS { ..., certSequence ABSENT }),
+ crls (WITH COMPONENTS { ..., crlSequence ABSENT }),
+ signerInfos (WITH COMPONENTS { ..., siSet PRESENT })
+} |
+ WITH COMPONENTS { ..., version (seVer2),
+ recipientInfos (WITH COMPONENTS { ..., riSequence PRESENT }),
+ digestAlgorithms (WITH COMPONENTS { ..., daSequence PRESENT }),
+ certificates (WITH COMPONENTS { ..., certSet ABSENT }),
+ crls (WITH COMPONENTS { ..., crlSet ABSENT }),
+ signerInfos (WITH COMPONENTS { ..., siSequence PRESENT })
+})
+
+--
+-- 12. Digested-data content type
+--
+
+DigestedData ::= SEQUENCE {
+ version INTEGER {ddVer0(0)} (ddVer0),
+ digestAlgorithm DigestAlgorithmIdentifier,
+ contentInfo ContentInfo,
+ digest Digest
+}
+
+--
+-- 13. Encrypted-data content type
+--
+
+EncryptedData ::= SEQUENCE {
+ version INTEGER {edVer0(0)} (edVer0),
+ encryptedContentInfo EncryptedContentInfo
+}
+
+--
+-- 14. Object Identifiers
+--
+
+pkcs-7 OBJECT IDENTIFIER ::=
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) 7 }
+data OBJECT IDENTIFIER ::= { pkcs-7 1 }
+signedData OBJECT IDENTIFIER ::= { pkcs-7 2 }
+envelopedData OBJECT IDENTIFIER ::= { pkcs-7 3 }
+signedAndEnvelopedData OBJECT IDENTIFIER ::= { pkcs-7 4 }
+digestedData OBJECT IDENTIFIER ::= { pkcs-7 5 }
+encryptedData OBJECT IDENTIFIER ::= { pkcs-7 6 }
+
+END
\ No newline at end of file
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-8.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-8.asn1
new file mode 100644
index 0000000000..266f90170a
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-8.asn1
@@ -0,0 +1,61 @@
+PKCS-8 {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-8(8)
+ modules(1) pkcs-8(1)}
+
+-- $Revision: 1.5 $
+
+-- This module has been checked for conformance with the ASN.1
+-- standard by the OSS ASN.1 Tools
+
+DEFINITIONS IMPLICIT TAGS ::=
+
+BEGIN
+
+-- EXPORTS All --
+-- All types and values defined in this module is exported for use in other
+-- ASN.1 modules.
+
+IMPORTS
+
+informationFramework
+ FROM UsefulDefinitions {joint-iso-itu-t(2) ds(5) module(1)
+ usefulDefinitions(0) 3}
+
+Attribute
+ FROM InformationFramework informationFramework
+
+AlgorithmIdentifier, ALGORITHM-IDENTIFIER
+ FROM PKCS-5 {iso(1) member-body(2) us(840) rsadsi(113549)
+ pkcs(1) pkcs-5(5) modules(16) pkcs-5(1)};
+
+-- Private-key information syntax
+
+PrivateKeyInfo ::= SEQUENCE {
+ version Version,
+ privateKeyAlgorithm AlgorithmIdentifier {{PrivateKeyAlgorithms}},
+ privateKey PrivateKey,
+ attributes [0] Attributes OPTIONAL }
+
+Version ::= INTEGER {v1(0)} (v1,...)
+
+PrivateKey ::= OCTET STRING
+
+Attributes ::= SET OF Attribute
+
+-- Encrypted private-key information syntax
+
+EncryptedPrivateKeyInfo ::= SEQUENCE {
+ encryptionAlgorithm AlgorithmIdentifier {{KeyEncryptionAlgorithms}},
+ encryptedData EncryptedData
+}
+
+EncryptedData ::= OCTET STRING
+
+PrivateKeyAlgorithms ALGORITHM-IDENTIFIER ::= {
+ ... -- For local profiles
+}
+
+KeyEncryptionAlgorithms ALGORITHM-IDENTIFIER ::= {
+ ... -- For local profiles
+}
+
+END
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-9.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-9.asn1
new file mode 100644
index 0000000000..cd561f4d7e
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-9.asn1
@@ -0,0 +1,391 @@
+PKCS-9 {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1)
+pkcs-9(9) modules(0) pkcs-9(1)}
+
+-- $Revision$
+
+DEFINITIONS IMPLICIT TAGS ::=
+
+BEGIN
+
+-- EXPORTS All --
+-- All types and values defined in this module is exported for use in
+-- other ASN.1 modules.
+
+IMPORTS
+
+informationFramework, authenticationFramework, selectedAttributeTypes,
+ upperBounds , id-at
+ FROM UsefulDefinitions {joint-iso-itu-t ds(5) module(1)
+ usefulDefinitions(0) 3}
+
+ub-name
+ FROM UpperBounds upperBounds
+
+OBJECT-CLASS, ATTRIBUTE, MATCHING-RULE, Attribute, top, objectIdentifierMatch
+ FROM InformationFramework informationFramework
+
+ALGORITHM, Extensions, Time
+ FROM AuthenticationFramework authenticationFramework
+
+DirectoryString, octetStringMatch, caseIgnoreMatch, caseExactMatch,
+ generalizedTimeMatch, integerMatch, serialNumber
+ FROM SelectedAttributeTypes selectedAttributeTypes
+
+ContentInfo, SignerInfo
+ FROM CryptographicMessageSyntax-2009 {iso(1) member-body(2) us(840)
+ rsadsi(113549) pkcs(1) pkcs-9(9) smime(16) modules(0) cms(1)}
+
+EncryptedPrivateKeyInfo
+ FROM PKCS-8 {iso(1) member-body(2) us(840) rsadsi(113549)
+ pkcs(1) pkcs-8(8) modules(1) pkcs-8(1)}
+
+PFX
+ FROM PKCS-12 {iso(1) member-body(2) us(840) rsadsi(113549)
+ pkcs(1) pkcs-12(12) modules(0) pkcs-12(1)}
+
+-- PKCS15Token
+-- FROM PKCS-15 {iso(1) member-body(2) us(840) rsadsi(113549)
+-- pkcs(1) pkcs-15(15) modules(1) pkcs-15(1)}
+;
+
+-- Upper bounds
+pkcs-9-ub-pkcs9String INTEGER ::= 255
+pkcs-9-ub-emailAddress INTEGER ::= pkcs-9-ub-pkcs9String
+pkcs-9-ub-unstructuredName INTEGER ::= pkcs-9-ub-pkcs9String
+pkcs-9-ub-unstructuredAddress INTEGER ::= pkcs-9-ub-pkcs9String
+pkcs-9-ub-challengePassword INTEGER ::= pkcs-9-ub-pkcs9String
+pkcs-9-ub-friendlyName INTEGER ::= pkcs-9-ub-pkcs9String
+pkcs-9-ub-signingDescription INTEGER ::= pkcs-9-ub-pkcs9String
+pkcs-9-ub-match INTEGER ::= pkcs-9-ub-pkcs9String
+pkcs-9-ub-pseudonym INTEGER ::= ub-name
+pkcs-9-ub-placeOfBirth INTEGER ::= ub-name
+
+-- Object Identifiers
+
+pkcs-9 OBJECT IDENTIFIER ::= {iso(1) member-body(2) us(840)
+ rsadsi(113549) pkcs(1) 9}
+
+ -- Main arcs
+pkcs-9-mo OBJECT IDENTIFIER ::= {pkcs-9 0} -- Modules branch
+pkcs-9-oc OBJECT IDENTIFIER ::= {pkcs-9 24} -- Object class branch
+pkcs-9-at OBJECT IDENTIFIER ::= {pkcs-9 25} -- Attribute branch, for new attributes
+pkcs-9-sx OBJECT IDENTIFIER ::= {pkcs-9 26} -- For syntaxes (RFC 2252)
+pkcs-9-mr OBJECT IDENTIFIER ::= {pkcs-9 27} -- Matching rules
+
+ -- Object classes
+pkcs-9-oc-pkcsEntity OBJECT IDENTIFIER ::= {pkcs-9-oc 1}
+pkcs-9-oc-naturalPerson OBJECT IDENTIFIER ::= {pkcs-9-oc 2}
+
+ -- Attributes
+pkcs-9-at-emailAddress OBJECT IDENTIFIER ::= {pkcs-9 1}
+pkcs-9-at-unstructuredName OBJECT IDENTIFIER ::= {pkcs-9 2}
+pkcs-9-at-contentType OBJECT IDENTIFIER ::= {pkcs-9 3}
+pkcs-9-at-messageDigest OBJECT IDENTIFIER ::= {pkcs-9 4}
+pkcs-9-at-signingTime OBJECT IDENTIFIER ::= {pkcs-9 5}
+pkcs-9-at-counterSignature OBJECT IDENTIFIER ::= {pkcs-9 6}
+pkcs-9-at-challengePassword OBJECT IDENTIFIER ::= {pkcs-9 7}
+pkcs-9-at-unstructuredAddress OBJECT IDENTIFIER ::= {pkcs-9 8}
+pkcs-9-at-extendedCertificateAttributes OBJECT IDENTIFIER ::= {pkcs-9 9}
+
+-- Obsolete (?) attribute identifiers, purportedly from "tentative
+-- PKCS #9 draft"
+-- pkcs-9-at-issuerAndSerialNumber OBJECT IDENTIFIER ::= {pkcs-9 10}
+-- pkcs-9-at-passwordCheck OBJECT IDENTIFIER ::= {pkcs-9 11}
+-- pkcs-9-at-publicKey OBJECT IDENTIFIER ::= {pkcs-9 12}
+
+pkcs-9-at-signingDescription OBJECT IDENTIFIER ::= {pkcs-9 13}
+pkcs-9-at-extensionRequest OBJECT IDENTIFIER ::= {pkcs-9 14}
+pkcs-9-at-smimeCapabilities OBJECT IDENTIFIER ::= {pkcs-9 15}
+
+-- Unused (?)
+-- pkcs-9-at-? OBJECT IDENTIFIER ::= {pkcs-9 17}
+-- pkcs-9-at-? OBJECT IDENTIFIER ::= {pkcs-9 18}
+-- pkcs-9-at-? OBJECT IDENTIFIER ::= {pkcs-9 19}
+
+pkcs-9-at-friendlyName OBJECT IDENTIFIER ::= {pkcs-9 20}
+pkcs-9-at-localKeyId OBJECT IDENTIFIER ::= {pkcs-9 21}
+pkcs-9-at-userPKCS12 OBJECT IDENTIFIER ::= {2 16 840 1 113730 3 1 216}
+pkcs-9-at-pkcs15Token OBJECT IDENTIFIER ::= {pkcs-9-at 1}
+pkcs-9-at-encryptedPrivateKeyInfo OBJECT IDENTIFIER ::= {pkcs-9-at 2}
+pkcs-9-at-randomNonce OBJECT IDENTIFIER ::= {pkcs-9-at 3}
+pkcs-9-at-sequenceNumber OBJECT IDENTIFIER ::= {pkcs-9-at 4}
+pkcs-9-at-pkcs7PDU OBJECT IDENTIFIER ::= {pkcs-9-at 5}
+
+ -- IETF PKIX Attribute branch
+ietf-at OBJECT IDENTIFIER ::= {1 3 6 1 5 5 7 9}
+
+pkcs-9-at-dateOfBirth OBJECT IDENTIFIER ::= {ietf-at 1}
+pkcs-9-at-placeOfBirth OBJECT IDENTIFIER ::= {ietf-at 2}
+pkcs-9-at-gender OBJECT IDENTIFIER ::= {ietf-at 3}
+pkcs-9-at-countryOfCitizenship OBJECT IDENTIFIER ::= {ietf-at 4}
+pkcs-9-at-countryOfResidence OBJECT IDENTIFIER ::= {ietf-at 5}
+
+ -- Syntaxes (for use with LDAP accessible directories)
+pkcs-9-sx-pkcs9String OBJECT IDENTIFIER ::= {pkcs-9-sx 1}
+pkcs-9-sx-signingTime OBJECT IDENTIFIER ::= {pkcs-9-sx 2}
+
+ -- Matching rules
+pkcs-9-mr-caseIgnoreMatch OBJECT IDENTIFIER ::= {pkcs-9-mr 1}
+pkcs-9-mr-signingTimeMatch OBJECT IDENTIFIER ::= {pkcs-9-mr 2}
+
+ -- Arcs with attributes defined elsewhere
+smime OBJECT IDENTIFIER ::= {pkcs-9 16}
+ -- Main arc for S/MIME (RFC 2633)
+certTypes OBJECT IDENTIFIER ::= {pkcs-9 22}
+ -- Main arc for certificate types defined in PKCS #12
+crlTypes OBJECT IDENTIFIER ::= {pkcs-9 23}
+ -- Main arc for crl types defined in PKCS #12
+
+ -- Other object identifiers
+id-at-pseudonym OBJECT IDENTIFIER ::= {id-at 65}
+
+-- Useful types
+
+PKCS9String {INTEGER : maxSize} ::= CHOICE {
+ ia5String IA5String (SIZE(1..maxSize)),
+ directoryString DirectoryString {maxSize}
+}
+
+-- Object classes
+
+pkcsEntity OBJECT-CLASS ::= {
+ SUBCLASS OF { top }
+ KIND auxiliary
+ MAY CONTAIN { PKCSEntityAttributeSet }
+ ID pkcs-9-oc-pkcsEntity
+}
+
+naturalPerson OBJECT-CLASS ::= {
+ SUBCLASS OF { top }
+ KIND auxiliary
+ MAY CONTAIN { NaturalPersonAttributeSet }
+ ID pkcs-9-oc-naturalPerson
+}
+
+-- Attribute sets
+
+PKCSEntityAttributeSet ATTRIBUTE ::= {
+ pKCS7PDU |
+ userPKCS12 |
+-- pKCS15Token |
+ encryptedPrivateKeyInfo,
+ ... -- For future extensions
+}
+
+NaturalPersonAttributeSet ATTRIBUTE ::= {
+ emailAddress |
+ unstructuredName |
+ unstructuredAddress |
+ dateOfBirth |
+ placeOfBirth |
+ gender |
+ countryOfCitizenship |
+ countryOfResidence |
+ pseudonym |
+ serialNumber,
+ ... -- For future extensions
+}
+
+-- Attributes
+
+pKCS7PDU ATTRIBUTE ::= {
+ WITH SYNTAX ContentInfo
+ ID pkcs-9-at-pkcs7PDU
+}
+
+userPKCS12 ATTRIBUTE ::= {
+ WITH SYNTAX PFX
+ ID pkcs-9-at-userPKCS12
+}
+
+-- pKCS15Token ATTRIBUTE ::= {
+-- WITH SYNTAX PKCS15Token
+-- ID pkcs-9-at-pkcs15Token
+-- }
+
+encryptedPrivateKeyInfo ATTRIBUTE ::= {
+ WITH SYNTAX EncryptedPrivateKeyInfo
+ ID pkcs-9-at-encryptedPrivateKeyInfo
+}
+
+emailAddress ATTRIBUTE ::= {
+ WITH SYNTAX IA5String (SIZE(1..pkcs-9-ub-emailAddress))
+ EQUALITY MATCHING RULE pkcs9CaseIgnoreMatch
+ ID pkcs-9-at-emailAddress
+}
+
+unstructuredName ATTRIBUTE ::= {
+ WITH SYNTAX PKCS9String {pkcs-9-ub-unstructuredName}
+ EQUALITY MATCHING RULE pkcs9CaseIgnoreMatch
+ ID pkcs-9-at-unstructuredName
+}
+
+unstructuredAddress ATTRIBUTE ::= {
+ WITH SYNTAX DirectoryString {pkcs-9-ub-unstructuredAddress}
+ EQUALITY MATCHING RULE caseIgnoreMatch
+ ID pkcs-9-at-unstructuredAddress
+}
+
+dateOfBirth ATTRIBUTE ::= {
+ WITH SYNTAX GeneralizedTime
+ EQUALITY MATCHING RULE generalizedTimeMatch
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-dateOfBirth
+}
+
+placeOfBirth ATTRIBUTE ::= {
+ WITH SYNTAX DirectoryString {pkcs-9-ub-placeOfBirth}
+ EQUALITY MATCHING RULE caseExactMatch
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-placeOfBirth
+}
+
+gender ATTRIBUTE ::= {
+ WITH SYNTAX PrintableString (SIZE(1) ^ FROM ("M" | "F" | "m" | "f"))
+ EQUALITY MATCHING RULE caseIgnoreMatch
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-gender
+}
+
+countryOfCitizenship ATTRIBUTE ::= {
+ WITH SYNTAX PrintableString (SIZE(2))(CONSTRAINED BY {
+ -- Must be a two-letter country acronym in accordance with
+ -- ISO/IEC 3166 --})
+ EQUALITY MATCHING RULE caseIgnoreMatch
+ ID pkcs-9-at-countryOfCitizenship
+}
+
+countryOfResidence ATTRIBUTE ::= {
+ WITH SYNTAX PrintableString (SIZE(2))(CONSTRAINED BY {
+ -- Must be a two-letter country acronym in accordance with
+ -- ISO/IEC 3166 --})
+ EQUALITY MATCHING RULE caseIgnoreMatch
+ ID pkcs-9-at-countryOfResidence
+}
+
+pseudonym ATTRIBUTE ::= {
+ WITH SYNTAX DirectoryString {pkcs-9-ub-pseudonym}
+ EQUALITY MATCHING RULE caseExactMatch
+ ID id-at-pseudonym
+}
+
+contentType ATTRIBUTE ::= {
+ WITH SYNTAX ContentType
+ EQUALITY MATCHING RULE objectIdentifierMatch
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-contentType
+}
+
+ContentType ::= OBJECT IDENTIFIER
+
+messageDigest ATTRIBUTE ::= {
+ WITH SYNTAX MessageDigest
+ EQUALITY MATCHING RULE octetStringMatch
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-messageDigest
+}
+
+MessageDigest ::= OCTET STRING
+
+signingTime ATTRIBUTE ::= {
+ WITH SYNTAX SigningTime
+ EQUALITY MATCHING RULE signingTimeMatch
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-signingTime
+}
+
+SigningTime ::= Time -- imported from ISO/IEC 9594-8
+
+randomNonce ATTRIBUTE ::= {
+ WITH SYNTAX RandomNonce
+ EQUALITY MATCHING RULE octetStringMatch
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-randomNonce
+}
+
+RandomNonce ::= OCTET STRING (SIZE(4..MAX)) -- At least four bytes long
+
+sequenceNumber ATTRIBUTE ::= {
+ WITH SYNTAX SequenceNumber
+ EQUALITY MATCHING RULE integerMatch
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-sequenceNumber
+}
+
+SequenceNumber ::= INTEGER (1..MAX)
+
+counterSignature ATTRIBUTE ::= {
+ WITH SYNTAX SignerInfo
+ ID pkcs-9-at-counterSignature
+}
+
+challengePassword ATTRIBUTE ::= {
+ WITH SYNTAX DirectoryString {pkcs-9-ub-challengePassword}
+ EQUALITY MATCHING RULE caseExactMatch
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-challengePassword
+}
+
+extensionRequest ATTRIBUTE ::= {
+ WITH SYNTAX ExtensionRequest
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-extensionRequest
+}
+
+ExtensionRequest ::= Extensions
+
+extendedCertificateAttributes ATTRIBUTE ::= {
+ WITH SYNTAX SET OF Attribute
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-extendedCertificateAttributes
+}
+
+friendlyName ATTRIBUTE ::= {
+ WITH SYNTAX BMPString (SIZE(1..pkcs-9-ub-friendlyName))
+ EQUALITY MATCHING RULE caseIgnoreMatch
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-friendlyName
+}
+
+localKeyId ATTRIBUTE ::= {
+ WITH SYNTAX OCTET STRING
+ EQUALITY MATCHING RULE octetStringMatch
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-localKeyId
+}
+
+signingDescription ATTRIBUTE ::= {
+ WITH SYNTAX DirectoryString {pkcs-9-ub-signingDescription}
+ EQUALITY MATCHING RULE caseIgnoreMatch
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-signingDescription
+}
+
+smimeCapabilities ATTRIBUTE ::= {
+ WITH SYNTAX SMIMECapabilities
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-smimeCapabilities
+}
+
+SMIMECapabilities ::= SEQUENCE OF SMIMECapability
+
+SMIMECapability ::= SEQUENCE {
+ algorithm ALGORITHM.&id ({SMIMEv3Algorithms}),
+ parameters ALGORITHM.&Type ({SMIMEv3Algorithms}{@algorithm})
+}
+
+SMIMEv3Algorithms ALGORITHM ::= {...-- See RFC 2633 --}
+
+ -- Matching rules
+
+pkcs9CaseIgnoreMatch MATCHING-RULE ::= {
+ SYNTAX PKCS9String {pkcs-9-ub-match}
+ ID pkcs-9-mr-caseIgnoreMatch
+}
+
+signingTimeMatch MATCHING-RULE ::= {
+ SYNTAX SigningTime
+ ID pkcs-9-mr-signingTimeMatch
+}
+
+END
\ No newline at end of file
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/PKCS7BodyPartType.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS7BodyPartType.asn1
index 525ee3c5ec..1bcc2281a1 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/PKCS7BodyPartType.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS7BodyPartType.asn1
@@ -6,7 +6,7 @@ BEGIN
IMPORTS
-- PKCS#7
ContentInfo
- FROM PKCS7 {iso(1) member-body(2) usa(840) rsadsi(113549) pkcs(1)
+ FROM PKCS-7 {iso(1) member-body(2) usa(840) rsadsi(113549) pkcs(1)
7 module(0)}
-- module not formally defined in the PKCS#7document, therefore defined in Annex O
-- IPMS Information Objects
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX-CommonTypes-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX-CommonTypes-2009.asn1
new file mode 100644
index 0000000000..fde5bddbf3
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX-CommonTypes-2009.asn1
@@ -0,0 +1,166 @@
+ PKIX-CommonTypes-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57)}
+
+ DEFINITIONS EXPLICIT TAGS ::=
+ BEGIN
+
+ -- ATTRIBUTE
+ --
+ -- Describe the set of data associated with an attribute of some type
+ --
+ -- &id is an OID identifying the attribute
+ -- &Type is the ASN.1 type structure for the attribute; not all
+ -- attributes have a data structure, so this field is optional
+ -- &minCount contains the minimum number of times the attribute can
+ -- occur in an AttributeSet
+ -- &maxCount contains the maximum number of times the attribute can
+ -- appear in an AttributeSet
+ -- Note: this cannot be automatically enforced as the field
+ -- cannot be defaulted to MAX.
+ -- &equality-match contains information about how matching should be
+ -- done
+ --
+ -- Currently we are using two different prefixes for attributes.
+ --
+ -- at- for certificate attributes
+ -- aa- for CMS attributes
+ --
+
+ ATTRIBUTE ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &Type OPTIONAL,
+ &equality-match MATCHING-RULE OPTIONAL,
+ &minCount INTEGER DEFAULT 1,
+ &maxCount INTEGER OPTIONAL
+ } WITH SYNTAX {
+ [TYPE &Type]
+ [EQUALITY MATCHING RULE &equality-match]
+ [COUNTS [MIN &minCount] [MAX &maxCount]]
+ IDENTIFIED BY &id
+ }
+
+ -- Specification of MATCHING-RULE information object class
+ --
+
+ MATCHING-RULE ::= CLASS {
+ &ParentMatchingRules MATCHING-RULE OPTIONAL,
+ &AssertionType OPTIONAL,
+ &uniqueMatchIndicator ATTRIBUTE OPTIONAL,
+ &id OBJECT IDENTIFIER UNIQUE
+ }
+ WITH SYNTAX {
+ [PARENT &ParentMatchingRules]
+ [SYNTAX &AssertionType]
+ [UNIQUE-MATCH-INDICATOR &uniqueMatchIndicator]
+ ID &id
+ }
+
+ -- AttributeSet
+ --
+ -- Used when a set of attributes is to occur.
+ --
+ -- type contains the identifier of the attribute
+ -- values contains a set of values where the structure of the ASN.1
+ -- is defined by the attribute
+ --
+ -- The parameter contains the set of objects describing
+ -- those attributes that can occur in this location.
+ --
+
+ AttributeSet{ATTRIBUTE:AttrSet} ::= SEQUENCE {
+ type ATTRIBUTE.&id({AttrSet}),
+ values SET SIZE (1..MAX) OF ATTRIBUTE.
+ &Type({AttrSet}{@type})
+ }
+
+ -- SingleAttribute
+ --
+ -- Used for a single valued attribute
+ --
+ -- The parameter contains the set of objects describing the
+ -- attributes that can occur in this location
+ --
+
+ SingleAttribute{ATTRIBUTE:AttrSet} ::= SEQUENCE {
+ type ATTRIBUTE.&id({AttrSet}),
+ value ATTRIBUTE.&Type({AttrSet}{@type})
+ }
+
+ -- EXTENSION
+ --
+ -- This class definition is used to describe the association of
+ -- object identifier and ASN.1 type structure for extensions
+ --
+ -- All extensions are prefixed with ext-
+ --
+ -- &id contains the object identifier for the extension
+ -- &ExtnType specifies the ASN.1 type structure for the extension
+ -- &Critical contains the set of legal values for the critical field.
+ -- This is normally {TRUE|FALSE} but in some instances may be
+ -- restricted to just one of these values.
+ --
+
+ EXTENSION ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &ExtnType,
+ &Critical BOOLEAN DEFAULT {TRUE | FALSE }
+ } WITH SYNTAX {
+ SYNTAX &ExtnType IDENTIFIED BY &id
+ [CRITICALITY &Critical]
+ }
+
+ -- Extensions
+ --
+ -- Used for a sequence of extensions.
+ --
+ -- The parameter contains the set of legal extensions that can
+ -- occur in this sequence.
+ --
+
+ Extensions{EXTENSION:ExtensionSet} ::=
+ SEQUENCE SIZE (1..MAX) OF Extension{{ExtensionSet}}
+
+ -- Extension
+ --
+ -- Used for a single extension
+ --
+ -- The parameter contains the set of legal extensions that can
+ -- occur in this extension.
+ --
+ -- The restriction on the critical field has been commented out
+ -- the authors are not completely sure it is correct.
+ -- The restriction could be done using custom code rather than
+ -- compiler-generated code, however.
+ --
+
+ Extension{EXTENSION:ExtensionSet} ::= SEQUENCE {
+ extnID EXTENSION.&id({ExtensionSet}),
+ critical BOOLEAN
+ -- (EXTENSION.&Critical({ExtensionSet}{@extnID}))
+ DEFAULT FALSE,
+ extnValue OCTET STRING (CONTAINING
+ EXTENSION.&ExtnType({ExtensionSet}{@extnID}))
+ -- contains the DER encoding of the ASN.1 value
+ -- corresponding to the extension type identified
+ -- by extnID
+ }
+
+ -- Security Category
+ --
+ -- Security categories are used both for specifying clearances and
+ -- for labeling objects. We move this here from RFC 3281 so that
+ -- they will use a common single object class to express this
+ -- information.
+ --
+
+ SECURITY-CATEGORY ::= TYPE-IDENTIFIER
+
+ SecurityCategory{SECURITY-CATEGORY:Supported} ::= SEQUENCE {
+ type [0] IMPLICIT SECURITY-CATEGORY.
+ &id({Supported}),
+ value [1] EXPLICIT SECURITY-CATEGORY.
+ &Type({Supported}{@type})
+ }
+
+ END
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX-X400Address-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX-X400Address-2009.asn1
new file mode 100644
index 0000000000..41cbaea67e
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX-X400Address-2009.asn1
@@ -0,0 +1,300 @@
+ --
+ -- This module is used to isolate all the X.400 naming information.
+ -- There is no reason to expect this to occur in a PKIX certificate.
+ --
+
+ PKIX-X400Address-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-x400address-02(60) }
+ DEFINITIONS EXPLICIT TAGS ::=
+ BEGIN
+
+ -- X.400 address syntax starts here
+
+ ORAddress ::= SEQUENCE {
+ built-in-standard-attributes BuiltInStandardAttributes,
+ built-in-domain-defined-attributes
+ BuiltInDomainDefinedAttributes OPTIONAL,
+
+ -- see also teletex-domain-defined-attributes
+ extension-attributes ExtensionAttributes OPTIONAL }
+
+ -- Built-in Standard Attributes
+
+ BuiltInStandardAttributes ::= SEQUENCE {
+ country-name CountryName OPTIONAL,
+ administration-domain-name AdministrationDomainName OPTIONAL,
+ network-address [0] IMPLICIT NetworkAddress OPTIONAL,
+ -- see also extended-network-address
+ terminal-identifier [1] IMPLICIT TerminalIdentifier OPTIONAL,
+ private-domain-name [2] PrivateDomainName OPTIONAL,
+ organization-name [3] IMPLICIT OrganizationName OPTIONAL,
+ -- see also teletex-organization-name
+ numeric-user-identifier [4] IMPLICIT NumericUserIdentifier
+ OPTIONAL,
+ personal-name [5] IMPLICIT PersonalName OPTIONAL,
+ -- see also teletex-personal-name
+ organizational-unit-names [6] IMPLICIT OrganizationalUnitNames
+ OPTIONAL }
+ -- see also teletex-organizational-unit-names
+
+ CountryName ::= [APPLICATION 1] CHOICE {
+ x121-dcc-code NumericString
+ (SIZE (ub-country-name-numeric-length)),
+ iso-3166-alpha2-code PrintableString
+ (SIZE (ub-country-name-alpha-length)) }
+
+ AdministrationDomainName ::= [APPLICATION 2] CHOICE {
+ numeric NumericString (SIZE (0..ub-domain-name-length)),
+ printable PrintableString (SIZE (0..ub-domain-name-length)) }
+
+ NetworkAddress ::= X121Address -- see also extended-network-address
+
+ X121Address ::= NumericString (SIZE (1..ub-x121-address-length))
+
+ TerminalIdentifier ::= PrintableString (SIZE
+ (1..ub-terminal-id-length))
+
+ PrivateDomainName ::= CHOICE {
+ numeric NumericString (SIZE (1..ub-domain-name-length)),
+ printable PrintableString (SIZE (1..ub-domain-name-length)) }
+
+ OrganizationName ::= PrintableString
+ (SIZE (1..ub-organization-name-length))
+ -- see also teletex-organization-name
+
+ NumericUserIdentifier ::= NumericString
+ (SIZE (1..ub-numeric-user-id-length))
+
+ PersonalName ::= SET {
+ surname [0] IMPLICIT PrintableString
+ (SIZE (1..ub-surname-length)),
+ given-name [1] IMPLICIT PrintableString
+ (SIZE (1..ub-given-name-length)) OPTIONAL,
+ initials [2] IMPLICIT PrintableString
+ (SIZE (1..ub-initials-length)) OPTIONAL,
+ generation-qualifier [3] IMPLICIT PrintableString
+ (SIZE (1..ub-generation-qualifier-length))
+ OPTIONAL }
+ -- see also teletex-personal-name
+
+ OrganizationalUnitNames ::= SEQUENCE SIZE (1..ub-organizational-units)
+ OF OrganizationalUnitName
+ -- see also teletex-organizational-unit-names
+
+ OrganizationalUnitName ::= PrintableString (SIZE
+ (1..ub-organizational-unit-name-length))
+
+ -- Built-in Domain-defined Attributes
+
+ BuiltInDomainDefinedAttributes ::= SEQUENCE SIZE
+ (1..ub-domain-defined-attributes) OF
+ BuiltInDomainDefinedAttribute
+
+ BuiltInDomainDefinedAttribute ::= SEQUENCE {
+ type PrintableString (SIZE
+ (1..ub-domain-defined-attribute-type-length)),
+ value PrintableString (SIZE
+ (1..ub-domain-defined-attribute-value-length)) }
+
+ -- Extension Attributes
+
+ ExtensionAttributes ::= SET SIZE (1..ub-extension-attributes) OF
+ ExtensionAttribute
+
+ EXTENSION-ATTRIBUTE ::= CLASS {
+ &id INTEGER (0..ub-extension-attributes) UNIQUE,
+ &Type
+ } WITH SYNTAX { &Type IDENTIFIED BY &id }
+
+ ExtensionAttribute ::= SEQUENCE {
+ extension-attribute-type [0] IMPLICIT EXTENSION-ATTRIBUTE.
+ &id({SupportedExtensionAttributes}),
+ extension-attribute-value [1] EXTENSION-ATTRIBUTE.
+ &Type({SupportedExtensionAttributes}
+ {@extension-attribute-type})}
+
+ SupportedExtensionAttributes EXTENSION-ATTRIBUTE ::= {
+ ea-commonName | ea-teletexCommonName | ea-teletexOrganizationName
+ | ea-teletexPersonalName | ea-teletexOrganizationalUnitNames |
+ ea-pDSName | ea-physicalDeliveryCountryName | ea-postalCode |
+ ea-physicalDeliveryOfficeName | ea-physicalDeliveryOfficeNumber |
+ ea-extensionORAddressComponents | ea-physicalDeliveryPersonalName
+ | ea-physicalDeliveryOrganizationName |
+ ea-extensionPhysicalDeliveryAddressComponents |
+ ea-unformattedPostalAddress | ea-streetAddress |
+ ea-postOfficeBoxAddress | ea-posteRestanteAddress |
+ ea-uniquePostalName | ea-localPostalAttributes |
+ ea-extendedNetworkAddress | ea-terminalType |
+ ea-teletexDomainDefinedAttributes, ... }
+
+ -- Extension types and attribute values
+
+ ea-commonName EXTENSION-ATTRIBUTE ::= { PrintableString
+ (SIZE (1..ub-common-name-length)) IDENTIFIED BY 1 }
+
+ ea-teletexCommonName EXTENSION-ATTRIBUTE ::= {TeletexString
+ (SIZE (1..ub-common-name-length)) IDENTIFIED BY 2 }
+
+ ea-teletexOrganizationName EXTENSION-ATTRIBUTE::= { TeletexString
+ (SIZE (1..ub-organization-name-length)) IDENTIFIED BY 3 }
+
+ ea-teletexPersonalName EXTENSION-ATTRIBUTE ::= {SET {
+ surname [0] IMPLICIT TeletexString
+ (SIZE (1..ub-surname-length)),
+ given-name [1] IMPLICIT TeletexString
+ (SIZE (1..ub-given-name-length)) OPTIONAL,
+ initials [2] IMPLICIT TeletexString
+ (SIZE (1..ub-initials-length)) OPTIONAL,
+ generation-qualifier [3] IMPLICIT TeletexString
+ (SIZE (1..ub-generation-qualifier-length))
+ OPTIONAL } IDENTIFIED BY 4 }
+
+ ea-teletexOrganizationalUnitNames EXTENSION-ATTRIBUTE ::=
+ { SEQUENCE SIZE (1..ub-organizational-units) OF
+ TeletexOrganizationalUnitName IDENTIFIED BY 5 }
+
+ TeletexOrganizationalUnitName ::= TeletexString
+ (SIZE (1..ub-organizational-unit-name-length))
+
+ ea-pDSName EXTENSION-ATTRIBUTE ::= {PrintableString
+ (SIZE (1..ub-pds-name-length)) IDENTIFIED BY 7 }
+
+ ea-physicalDeliveryCountryName EXTENSION-ATTRIBUTE ::= { CHOICE {
+ x121-dcc-code NumericString (SIZE
+ (ub-country-name-numeric-length)),
+ iso-3166-alpha2-code PrintableString
+ (SIZE (ub-country-name-alpha-length)) }
+ IDENTIFIED BY 8 }
+
+ ea-postalCode EXTENSION-ATTRIBUTE ::= { CHOICE {
+ numeric-code NumericString (SIZE (1..ub-postal-code-length)),
+ printable-code PrintableString (SIZE (1..ub-postal-code-length)) }
+ IDENTIFIED BY 9 }
+
+ ea-physicalDeliveryOfficeName EXTENSION-ATTRIBUTE ::=
+ { PDSParameter IDENTIFIED BY 10 }
+
+ ea-physicalDeliveryOfficeNumber EXTENSION-ATTRIBUTE ::=
+ {PDSParameter IDENTIFIED BY 11 }
+
+ ea-extensionORAddressComponents EXTENSION-ATTRIBUTE ::=
+ {PDSParameter IDENTIFIED BY 12 }
+
+ ea-physicalDeliveryPersonalName EXTENSION-ATTRIBUTE ::=
+ {PDSParameter IDENTIFIED BY 13}
+
+ ea-physicalDeliveryOrganizationName EXTENSION-ATTRIBUTE ::=
+ {PDSParameter IDENTIFIED BY 14 }
+
+ ea-extensionPhysicalDeliveryAddressComponents EXTENSION-ATTRIBUTE ::=
+ {PDSParameter IDENTIFIED BY 15 }
+
+ ea-unformattedPostalAddress EXTENSION-ATTRIBUTE ::= { SET {
+ printable-address SEQUENCE SIZE (1..ub-pds-physical-address-lines)
+ OF PrintableString (SIZE (1..ub-pds-parameter-length))
+ OPTIONAL,
+ teletex-string TeletexString
+ (SIZE (1..ub-unformatted-address-length)) OPTIONAL }
+ IDENTIFIED BY 16 }
+
+ ea-streetAddress EXTENSION-ATTRIBUTE ::=
+ {PDSParameter IDENTIFIED BY 17 }
+
+ ea-postOfficeBoxAddress EXTENSION-ATTRIBUTE ::=
+ {PDSParameter IDENTIFIED BY 18 }
+
+ ea-posteRestanteAddress EXTENSION-ATTRIBUTE ::=
+ {PDSParameter IDENTIFIED BY 19 }
+
+ ea-uniquePostalName EXTENSION-ATTRIBUTE ::=
+ { PDSParameter IDENTIFIED BY 20 }
+
+ ea-localPostalAttributes EXTENSION-ATTRIBUTE ::=
+ {PDSParameter IDENTIFIED BY 21 }
+
+ PDSParameter ::= SET {
+ printable-string PrintableString
+ (SIZE(1..ub-pds-parameter-length)) OPTIONAL,
+ teletex-string TeletexString
+ (SIZE(1..ub-pds-parameter-length)) OPTIONAL }
+
+ ea-extendedNetworkAddress EXTENSION-ATTRIBUTE ::= {
+ CHOICE {
+ e163-4-address SEQUENCE {
+ number [0] IMPLICIT NumericString
+ (SIZE (1..ub-e163-4-number-length)),
+ sub-address [1] IMPLICIT NumericString
+ (SIZE (1..ub-e163-4-sub-address-length)) OPTIONAL
+ },
+ psap-address [0] IMPLICIT PresentationAddress
+ } IDENTIFIED BY 22
+ }
+
+ PresentationAddress ::= SEQUENCE {
+ pSelector [0] EXPLICIT OCTET STRING OPTIONAL,
+ sSelector [1] EXPLICIT OCTET STRING OPTIONAL,
+ tSelector [2] EXPLICIT OCTET STRING OPTIONAL,
+ nAddresses [3] EXPLICIT SET SIZE (1..MAX) OF OCTET STRING }
+
+ ea-terminalType EXTENSION-ATTRIBUTE ::= {INTEGER {
+ telex (3),
+ teletex (4),
+ g3-facsimile (5),
+ g4-facsimile (6),
+ ia5-terminal (7),
+ videotex (8) } (0..ub-integer-options)
+ IDENTIFIED BY 23 }
+
+ -- Extension Domain-defined Attributes
+
+ ea-teletexDomainDefinedAttributes EXTENSION-ATTRIBUTE ::=
+ { SEQUENCE SIZE (1..ub-domain-defined-attributes) OF
+ TeletexDomainDefinedAttribute IDENTIFIED BY 6 }
+
+ TeletexDomainDefinedAttribute ::= SEQUENCE {
+ type TeletexString
+ (SIZE (1..ub-domain-defined-attribute-type-length)),
+ value TeletexString
+ (SIZE (1..ub-domain-defined-attribute-value-length)) }
+
+ -- specifications of Upper Bounds MUST be regarded as mandatory
+ -- from Annex B of ITU-T X.411 Reference Definition of MTS Parameter
+ -- Upper Bounds
+ -- Upper Bounds
+ ub-match INTEGER ::= 128
+ ub-common-name-length INTEGER ::= 64
+ ub-country-name-alpha-length INTEGER ::= 2
+ ub-country-name-numeric-length INTEGER ::= 3
+ ub-domain-defined-attributes INTEGER ::= 4
+ ub-domain-defined-attribute-type-length INTEGER ::= 8
+ ub-domain-defined-attribute-value-length INTEGER ::= 128
+ ub-domain-name-length INTEGER ::= 16
+ ub-extension-attributes INTEGER ::= 256
+ ub-e163-4-number-length INTEGER ::= 15
+ ub-e163-4-sub-address-length INTEGER ::= 40
+ ub-generation-qualifier-length INTEGER ::= 3
+ ub-given-name-length INTEGER ::= 16
+ ub-initials-length INTEGER ::= 5
+ ub-integer-options INTEGER ::= 256
+ ub-numeric-user-id-length INTEGER ::= 32
+ ub-organization-name-length INTEGER ::= 64
+ ub-organizational-unit-name-length INTEGER ::= 32
+ ub-organizational-units INTEGER ::= 4
+ ub-pds-name-length INTEGER ::= 16
+ ub-pds-parameter-length INTEGER ::= 30
+ ub-pds-physical-address-lines INTEGER ::= 6
+ ub-postal-code-length INTEGER ::= 16
+ ub-surname-length INTEGER ::= 40
+ ub-terminal-id-length INTEGER ::= 24
+ ub-unformatted-address-length INTEGER ::= 180
+ ub-x121-address-length INTEGER ::= 16
+
+ -- Note - upper bounds on string types, such as TeletexString, are
+ -- measured in characters. Excepting PrintableString or IA5String, a
+ -- significantly greater number of octets will be required to hold
+ -- such a value. As a minimum, 16 octets or twice the specified
+ -- upper bound, whichever is the larger, should be allowed for
+ -- TeletexString. For UTF8String or UniversalString, at least four
+ -- times the upper bound should be allowed.
+
+ END
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1-PSS-OAEP-Algorithms-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1-PSS-OAEP-Algorithms-2009.asn1
new file mode 100644
index 0000000000..b1232fb8f2
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1-PSS-OAEP-Algorithms-2009.asn1
@@ -0,0 +1,308 @@
+ PKIX1-PSS-OAEP-Algorithms-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-rsa-pkalgs-02(54)}
+ DEFINITIONS EXPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ AlgorithmIdentifier{}, ALGORITHM, DIGEST-ALGORITHM, KEY-TRANSPORT,
+ SIGNATURE-ALGORITHM, PUBLIC-KEY, SMIME-CAPS
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ id-sha1, mda-sha1, pk-rsa, RSAPublicKey
+ FROM PKIXAlgs-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-algorithms2008-02(56)};
+
+ -- ============================
+ -- Object Set exports
+ -- ============================
+ --
+ -- Define top-level symbols with all of the objects defined for
+ -- export to other modules. These objects would be included as part
+ -- of an Object Set to restrict the set of legal values.
+ --
+
+ PublicKeys PUBLIC-KEY ::= { pk-rsaSSA-PSS | pk-rsaES-OAEP, ... }
+ SignatureAlgs SIGNATURE-ALGORITHM ::= { sa-rsaSSA-PSS, ...}
+ KeyTransportAlgs KEY-TRANSPORT ::= { kta-rsaES-OAEP, ... }
+ HashAlgs DIGEST-ALGORITHM ::= { mda-sha224 | mda-sha256 | mda-sha384
+ | mda-sha512, ... }
+ SMimeCaps SMIME-CAPS ::= {
+ sa-rsaSSA-PSS.&smimeCaps |
+ kta-rsaES-OAEP.&smimeCaps,
+ ...
+ }
+
+ -- =============================
+ -- Algorithm Objects
+ -- =============================
+
+ --
+ -- Public key object for PSS signatures
+ --
+
+ pk-rsaSSA-PSS PUBLIC-KEY ::= {
+ IDENTIFIER id-RSASSA-PSS
+ KEY RSAPublicKey
+ PARAMS TYPE RSASSA-PSS-params ARE optional
+ -- Private key format not in this module --
+ CERT-KEY-USAGE { nonRepudiation, digitalSignature,
+ keyCertSign, cRLSign }
+ }
+
+ --
+ -- Signature algorithm definition for PSS signatures
+ --
+
+ sa-rsaSSA-PSS SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER id-RSASSA-PSS
+ PARAMS TYPE RSASSA-PSS-params ARE required
+ HASHES { mda-sha1 | mda-sha224 | mda-sha256 | mda-sha384
+ | mda-sha512 }
+ PUBLIC-KEYS { pk-rsa | pk-rsaSSA-PSS }
+ SMIME-CAPS { IDENTIFIED BY id-RSASSA-PSS }
+ }
+
+ --
+ -- Signature algorithm definitions for PKCS v1.5 signatures
+ --
+
+ sa-sha224WithRSAEncryption SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER sha224WithRSAEncryption
+ PARAMS TYPE NULL ARE required
+ HASHES { mda-sha224 }
+ PUBLIC-KEYS { pk-rsa }
+ SMIME-CAPS { IDENTIFIED BY sha224WithRSAEncryption }
+ }
+ sha224WithRSAEncryption OBJECT IDENTIFIER ::= { pkcs-1 14 }
+
+ sa-sha256WithRSAEncryption SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER sha256WithRSAEncryption
+ PARAMS TYPE NULL ARE required
+ HASHES { mda-sha256 }
+ PUBLIC-KEYS { pk-rsa }
+ SMIME-CAPS { IDENTIFIED BY sha256WithRSAEncryption }
+ }
+ sha256WithRSAEncryption OBJECT IDENTIFIER ::= { pkcs-1 11 }
+
+ sa-sha384WithRSAEncryption SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER sha384WithRSAEncryption
+ PARAMS TYPE NULL ARE required
+ HASHES { mda-sha384 }
+ PUBLIC-KEYS { pk-rsa }
+ SMIME-CAPS { IDENTIFIED BY sha384WithRSAEncryption }
+ }
+ sha384WithRSAEncryption OBJECT IDENTIFIER ::= { pkcs-1 12 }
+
+ sa-sha512WithRSAEncryption SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER sha512WithRSAEncryption
+ PARAMS TYPE NULL ARE required
+ HASHES { mda-sha512 }
+ PUBLIC-KEYS { pk-rsa }
+ SMIME-CAPS { IDENTIFIED BY sha512WithRSAEncryption }
+ }
+ sha512WithRSAEncryption OBJECT IDENTIFIER ::= { pkcs-1 13 }
+
+ --
+ -- Public key definition for OAEP encryption
+ --
+
+ pk-rsaES-OAEP PUBLIC-KEY ::= {
+ IDENTIFIER id-RSAES-OAEP
+ KEY RSAPublicKey
+ PARAMS TYPE RSAES-OAEP-params ARE optional
+ -- Private key format not in this module --
+ CERT-KEY-USAGE {keyEncipherment, dataEncipherment}
+ }
+
+ --
+ -- Key transport key lock definition for OAEP encryption
+ --
+
+ kta-rsaES-OAEP KEY-TRANSPORT ::= {
+ IDENTIFIER id-RSAES-OAEP
+ PARAMS TYPE RSAES-OAEP-params ARE required
+ PUBLIC-KEYS { pk-rsa | pk-rsaES-OAEP }
+ SMIME-CAPS { TYPE RSAES-OAEP-params IDENTIFIED BY id-RSAES-OAEP}
+ }
+ -- ============================
+ -- Basic object identifiers
+ -- ============================
+
+ pkcs-1 OBJECT IDENTIFIER ::=
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) 1 }
+
+ -- When rsaEncryption is used in an AlgorithmIdentifier, the
+ -- parameters MUST be present and MUST be NULL.
+ -- rsaEncryption OBJECT IDENTIFIER ::= { pkcs-1 1 }
+
+ -- When id-RSAES-OAEP is used in an AlgorithmIdentifier,
+ -- and the parameters field is present, it MUST be
+ -- RSAES-OAEP-params.
+
+ id-RSAES-OAEP OBJECT IDENTIFIER ::= { pkcs-1 7 }
+
+ -- When id-mgf1 is used in an AlgorithmIdentifier, the parameters
+ -- MUST be present and MUST be a HashAlgorithm.
+
+ id-mgf1 OBJECT IDENTIFIER ::= { pkcs-1 8 }
+
+ -- When id-pSpecified is used in an AlgorithmIdentifier, the
+ -- parameters MUST be an OCTET STRING.
+
+ id-pSpecified OBJECT IDENTIFIER ::= { pkcs-1 9 }
+
+ -- When id-RSASSA-PSS is used in an AlgorithmIdentifier, and the
+ -- parameters field is present, it MUST be RSASSA-PSS-params.
+
+ id-RSASSA-PSS OBJECT IDENTIFIER ::= { pkcs-1 10 }
+
+ -- When the following OIDs are used in an AlgorithmIdentifier, the
+ -- parameters SHOULD be absent, but if the parameters are present,
+ -- they MUST be NULL.
+
+ --
+ -- id-sha1 is imported from RFC 3279. Additionally, the v1.5
+ -- signature algorithms (i.e., rsaWithSHA256) are now solely placed
+ -- in that module.
+ --
+
+ id-sha224 OBJECT IDENTIFIER ::=
+ { joint-iso-itu-t(2) country(16) us(840) organization(1) gov(101)
+ csor(3) nistAlgorithms(4) hashalgs(2) 4 }
+
+ mda-sha224 DIGEST-ALGORITHM ::= {
+ IDENTIFIER id-sha224
+ PARAMS TYPE NULL ARE preferredAbsent
+ }
+
+ id-sha256 OBJECT IDENTIFIER ::=
+ { joint-iso-itu-t(2) country(16) us(840) organization(1) gov(101)
+ csor(3) nistAlgorithms(4) hashalgs(2) 1 }
+
+ mda-sha256 DIGEST-ALGORITHM ::= {
+ IDENTIFIER id-sha256
+ PARAMS TYPE NULL ARE preferredAbsent
+ }
+
+ id-sha384 OBJECT IDENTIFIER ::=
+ { joint-iso-itu-t(2) country(16) us(840) organization(1) gov(101)
+ csor(3) nistAlgorithms(4) hashalgs(2) 2 }
+
+ mda-sha384 DIGEST-ALGORITHM ::= {
+ IDENTIFIER id-sha384
+ PARAMS TYPE NULL ARE preferredAbsent
+ }
+
+ id-sha512 OBJECT IDENTIFIER ::=
+ { joint-iso-itu-t(2) country(16) us(840) organization(1) gov(101)
+ csor(3) nistAlgorithms(4) hashalgs(2) 3 }
+
+ mda-sha512 DIGEST-ALGORITHM ::= {
+ IDENTIFIER id-sha512
+ PARAMS TYPE NULL ARE preferredAbsent
+ }
+
+ -- =============
+ -- Constants
+ -- =============
+
+ EncodingParameters ::= OCTET STRING(SIZE(0..MAX))
+
+ nullOctetString EncodingParameters ::= ''H
+
+ nullParameters NULL ::= NULL
+
+ -- =========================
+ -- Algorithm Identifiers
+ -- =========================
+
+ HashAlgorithm ::= AlgorithmIdentifier{DIGEST-ALGORITHM,
+ {HashAlgorithms}}
+
+ HashAlgorithms DIGEST-ALGORITHM ::= {
+ { IDENTIFIER id-sha1 PARAMS TYPE NULL ARE preferredPresent } |
+ { IDENTIFIER id-sha224 PARAMS TYPE NULL ARE preferredPresent } |
+ { IDENTIFIER id-sha256 PARAMS TYPE NULL ARE preferredPresent } |
+ { IDENTIFIER id-sha384 PARAMS TYPE NULL ARE preferredPresent } |
+ { IDENTIFIER id-sha512 PARAMS TYPE NULL ARE preferredPresent }
+ }
+
+ sha1Identifier HashAlgorithm ::= {
+ algorithm id-sha1,
+ parameters NULL : NULL
+ }
+
+ --
+ -- We have a default algorithm - create the value here
+ --
+
+ MaskGenAlgorithm ::= AlgorithmIdentifier{ALGORITHM,
+ {PKCS1MGFAlgorithms}}
+
+ mgf1SHA1 MaskGenAlgorithm ::= {
+ algorithm id-mgf1,
+ parameters HashAlgorithm : sha1Identifier
+ }
+
+ --
+ -- Define the set of mask generation functions
+ --
+ -- If the identifier is id-mgf1, any of the listed hash
+ -- algorithms may be used.
+ --
+
+ PKCS1MGFAlgorithms ALGORITHM ::= {
+ { IDENTIFIER id-mgf1 PARAMS TYPE HashAlgorithm ARE required },
+ ...
+ }
+
+ --
+ -- Define the set of known source algorithms for PSS
+ --
+
+ PSourceAlgorithm ::= AlgorithmIdentifier{ALGORITHM,
+ {PSS-SourceAlgorithms}}
+
+ PSS-SourceAlgorithms ALGORITHM ::= {
+ { IDENTIFIER id-pSpecified PARAMS TYPE EncodingParameters
+ ARE required },
+ ...
+ }
+ pSpecifiedEmpty PSourceAlgorithm ::= {
+ algorithm id-pSpecified,
+ parameters EncodingParameters : nullOctetString
+ }
+
+ -- ===================
+ -- Main structures
+ -- ===================
+
+ -- AlgorithmIdentifier parameters for id-RSASSA-PSS.
+ -- Note that the tags in this Sequence are explicit.
+ -- Note: The hash algorithm in hashAlgorithm and in
+ -- maskGenAlgorithm should be the same.
+
+ RSASSA-PSS-params ::= SEQUENCE {
+ hashAlgorithm [0] HashAlgorithm DEFAULT sha1Identifier,
+ maskGenAlgorithm [1] MaskGenAlgorithm DEFAULT mgf1SHA1,
+ saltLength [2] INTEGER DEFAULT 20,
+ trailerField [3] INTEGER DEFAULT 1
+ }
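+
+ -- Informative sketch (the value name is a placeholder, not an
+ -- assignment made by this module): parameters selecting SHA-256 for
+ -- both the hash and the mask generation function, with the salt
+ -- length equal to the hash length, could be written as
+ --
+ --     rsaSSA-PSS-SHA256-params RSASSA-PSS-params ::= {
+ --         hashAlgorithm    { algorithm id-sha256, parameters NULL : NULL },
+ --         maskGenAlgorithm { algorithm id-mgf1,
+ --                            parameters HashAlgorithm :
+ --                              { algorithm id-sha256,
+ --                                parameters NULL : NULL } },
+ --         saltLength       32,
+ --         trailerField     1
+ --     }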
+
+ -- AlgorithmIdentifier parameters for id-RSAES-OAEP.
+ -- Note that the tags in this Sequence are explicit.
+ -- Note: The hash algorithm in hashFunc and in
+ -- maskGenFunc should be the same.
+
+ RSAES-OAEP-params ::= SEQUENCE {
+ hashFunc [0] HashAlgorithm DEFAULT sha1Identifier,
+ maskGenFunc [1] MaskGenAlgorithm DEFAULT mgf1SHA1,
+ pSourceFunc [2] PSourceAlgorithm DEFAULT
+ pSpecifiedEmpty
+ }
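+
+ -- Informative sketch (the value name below is a placeholder, not an
+ -- assignment made by this module): because every component has a
+ -- DEFAULT, an empty SEQUENCE value selects the all-default parameters
+ -- (SHA-1, MGF1 with SHA-1, and pSpecifiedEmpty):
+ --
+ --     rsaES-OAEP-Default-Params RSAES-OAEP-params ::= {}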
+
+ END
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1Explicit-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1Explicit-2009.asn1
new file mode 100644
index 0000000000..613e0e9d2c
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1Explicit-2009.asn1
@@ -0,0 +1,415 @@
+ PKIX1Explicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1)
+ security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-explicit-02(51)}
+ DEFINITIONS EXPLICIT TAGS ::=
+ BEGIN
+
+ IMPORTS
+
+ Extensions{}, EXTENSION, ATTRIBUTE, SingleAttribute{}
+ FROM PKIX-CommonTypes-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57)}
+
+ AlgorithmIdentifier{}, PUBLIC-KEY, SIGNATURE-ALGORITHM
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ CertExtensions, CrlExtensions, CrlEntryExtensions
+ FROM PKIX1Implicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-implicit-02(59)}
+ SignatureAlgs, PublicKeys
+ FROM PKIXAlgs-2009
+ {iso(1) identified-organization(3) dod(6)
+ internet(1) security(5) mechanisms(5) pkix(7) id-mod(0) 56}
+
+ SignatureAlgs, PublicKeys
+ FROM PKIX1-PSS-OAEP-Algorithms-2009
+ {iso(1) identified-organization(3) dod(6)
+ internet(1) security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-rsa-pkalgs-02(54)}
+
+ ORAddress
+ FROM PKIX-X400Address-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-x400address-02(60)};
+
+ id-pkix OBJECT IDENTIFIER ::=
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7)}
+
+ -- PKIX arcs
+
+ id-pe OBJECT IDENTIFIER ::= { id-pkix 1 }
+ -- arc for private certificate extensions
+ id-qt OBJECT IDENTIFIER ::= { id-pkix 2 }
+ -- arc for policy qualifier types
+ id-kp OBJECT IDENTIFIER ::= { id-pkix 3 }
+ -- arc for extended key purpose OIDs
+ id-ad OBJECT IDENTIFIER ::= { id-pkix 48 }
+ -- arc for access descriptors
+
+ -- policyQualifierIds for Internet policy qualifiers
+
+ id-qt-cps OBJECT IDENTIFIER ::= { id-qt 1 }
+ -- OID for CPS qualifier
+ id-qt-unotice OBJECT IDENTIFIER ::= { id-qt 2 }
+ -- OID for user notice qualifier
+
+ -- access descriptor definitions
+
+ id-ad-ocsp OBJECT IDENTIFIER ::= { id-ad 1 }
+ id-ad-caIssuers OBJECT IDENTIFIER ::= { id-ad 2 }
+ id-ad-timeStamping OBJECT IDENTIFIER ::= { id-ad 3 }
+ id-ad-caRepository OBJECT IDENTIFIER ::= { id-ad 5 }
+
+ -- attribute data types
+ AttributeType ::= ATTRIBUTE.&id
+
+ -- Replaced by SingleAttribute{}
+ --
+ -- AttributeTypeAndValue ::= SEQUENCE {
+ -- type ATTRIBUTE.&id({SupportedAttributes}),
+ -- value ATTRIBUTE.&Type({SupportedAttributes}{@type}) }
+ --
+
+ -- Suggested naming attributes: Definition of the following
+ -- information object set may be augmented to meet local
+ -- requirements. Note that deleting members of the set may
+ -- prevent interoperability with conforming implementations.
+ -- All attributes are presented in pairs: the AttributeType
+ -- followed by the type definition for the corresponding
+ -- AttributeValue.
+
+ -- Arc for standard naming attributes
+
+ id-at OBJECT IDENTIFIER ::= { joint-iso-ccitt(2) ds(5) 4 }
+
+ -- Naming attributes of type X520name
+
+ id-at-name AttributeType ::= { id-at 41 }
+ at-name ATTRIBUTE ::= { TYPE X520name IDENTIFIED BY id-at-name }
+
+ id-at-surname AttributeType ::= { id-at 4 }
+ at-surname ATTRIBUTE ::= { TYPE X520name IDENTIFIED BY id-at-surname }
+
+ id-at-givenName AttributeType ::= { id-at 42 }
+ at-givenName ATTRIBUTE ::=
+ { TYPE X520name IDENTIFIED BY id-at-givenName }
+
+ id-at-initials AttributeType ::= { id-at 43 }
+ at-initials ATTRIBUTE ::=
+ { TYPE X520name IDENTIFIED BY id-at-initials }
+
+ id-at-generationQualifier AttributeType ::= { id-at 44 }
+ at-generationQualifier ATTRIBUTE ::=
+ { TYPE X520name IDENTIFIED BY id-at-generationQualifier }
+
+ -- Directory string type --
+
+ DirectoryString{INTEGER:maxSize} ::= CHOICE {
+ teletexString TeletexString(SIZE (1..maxSize)),
+ printableString PrintableString(SIZE (1..maxSize)),
+ bmpString BMPString(SIZE (1..maxSize)),
+ universalString UniversalString(SIZE (1..maxSize)),
+ uTF8String UTF8String(SIZE (1..maxSize))
+ }
+
+ X520name ::= DirectoryString {ub-name}
+
+ -- Naming attributes of type X520CommonName
+
+ id-at-commonName AttributeType ::= { id-at 3 }
+
+ at-x520CommonName ATTRIBUTE ::=
+ {TYPE X520CommonName IDENTIFIED BY id-at-commonName }
+
+ X520CommonName ::= DirectoryString {ub-common-name}
+
+ -- Naming attributes of type X520LocalityName
+
+ id-at-localityName AttributeType ::= { id-at 7 }
+
+ at-x520LocalityName ATTRIBUTE ::=
+ { TYPE X520LocalityName IDENTIFIED BY id-at-localityName }
+ X520LocalityName ::= DirectoryString {ub-locality-name}
+
+ -- Naming attributes of type X520StateOrProvinceName
+
+ id-at-stateOrProvinceName AttributeType ::= { id-at 8 }
+
+ at-x520StateOrProvinceName ATTRIBUTE ::=
+ { TYPE DirectoryString {ub-state-name}
+ IDENTIFIED BY id-at-stateOrProvinceName }
+ X520StateOrProvinceName ::= DirectoryString {ub-state-name}
+
+ -- Naming attributes of type X520OrganizationName
+
+ id-at-organizationName AttributeType ::= { id-at 10 }
+
+ at-x520OrganizationName ATTRIBUTE ::=
+ { TYPE DirectoryString {ub-organization-name}
+ IDENTIFIED BY id-at-organizationName }
+ X520OrganizationName ::= DirectoryString {ub-organization-name}
+
+ -- Naming attributes of type X520OrganizationalUnitName
+
+ id-at-organizationalUnitName AttributeType ::= { id-at 11 }
+
+ at-x520OrganizationalUnitName ATTRIBUTE ::=
+ { TYPE DirectoryString {ub-organizational-unit-name}
+ IDENTIFIED BY id-at-organizationalUnitName }
+ X520OrganizationalUnitName ::= DirectoryString
+ {ub-organizational-unit-name}
+
+ -- Naming attributes of type X520Title
+
+ id-at-title AttributeType ::= { id-at 12 }
+
+ at-x520Title ATTRIBUTE ::= { TYPE DirectoryString { ub-title }
+ IDENTIFIED BY id-at-title }
+
+ -- Naming attributes of type X520dnQualifier
+
+ id-at-dnQualifier AttributeType ::= { id-at 46 }
+
+ at-x520dnQualifier ATTRIBUTE ::= { TYPE PrintableString
+ IDENTIFIED BY id-at-dnQualifier }
+
+ -- Naming attributes of type X520countryName (digraph from IS 3166)
+
+ id-at-countryName AttributeType ::= { id-at 6 }
+
+ at-x520countryName ATTRIBUTE ::= { TYPE PrintableString (SIZE (2))
+ IDENTIFIED BY id-at-countryName }
+
+ -- Naming attributes of type X520SerialNumber
+
+ id-at-serialNumber AttributeType ::= { id-at 5 }
+
+ at-x520SerialNumber ATTRIBUTE ::= {TYPE PrintableString
+ (SIZE (1..ub-serial-number)) IDENTIFIED BY id-at-serialNumber }
+
+ -- Naming attributes of type X520Pseudonym
+
+ id-at-pseudonym AttributeType ::= { id-at 65 }
+
+ at-x520Pseudonym ATTRIBUTE ::= { TYPE DirectoryString {ub-pseudonym}
+ IDENTIFIED BY id-at-pseudonym }
+
+ -- Naming attributes of type DomainComponent (from RFC 2247)
+
+ id-domainComponent AttributeType ::=
+ { itu-t(0) data(9) pss(2342) ucl(19200300) pilot(100)
+ pilotAttributeType(1) 25 }
+
+ at-domainComponent ATTRIBUTE ::= {TYPE IA5String
+ IDENTIFIED BY id-domainComponent }
+
+ -- Legacy attributes
+
+ pkcs-9 OBJECT IDENTIFIER ::=
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) 9 }
+ id-emailAddress AttributeType ::= { pkcs-9 1 }
+
+ at-emailAddress ATTRIBUTE ::= {TYPE IA5String
+ (SIZE (1..ub-emailaddress-length)) IDENTIFIED BY
+ id-emailAddress }
+
+ -- naming data types --
+
+ Name ::= CHOICE { -- only one possibility for now --
+ rdnSequence RDNSequence }
+
+ RDNSequence ::= SEQUENCE OF RelativeDistinguishedName
+
+ DistinguishedName ::= RDNSequence
+
+ RelativeDistinguishedName ::=
+ SET SIZE (1 .. MAX) OF SingleAttribute { {SupportedAttributes} }
+
+ -- These are the known name elements for a DN
+
+ SupportedAttributes ATTRIBUTE ::= {
+ at-name | at-surname | at-givenName | at-initials |
+ at-generationQualifier | at-x520CommonName |
+ at-x520LocalityName | at-x520StateOrProvinceName |
+ at-x520OrganizationName | at-x520OrganizationalUnitName |
+ at-x520Title | at-x520dnQualifier | at-x520countryName |
+ at-x520SerialNumber | at-x520Pseudonym | at-domainComponent |
+ at-emailAddress, ... }
+
+ --
+ -- Certificate- and CRL-specific structures begin here
+ --
+
+ Certificate ::= SIGNED{TBSCertificate}
+
+ TBSCertificate ::= SEQUENCE {
+ version [0] Version DEFAULT v1,
+ serialNumber CertificateSerialNumber,
+ signature AlgorithmIdentifier{SIGNATURE-ALGORITHM,
+ {SignatureAlgorithms}},
+ issuer Name,
+ validity Validity,
+ subject Name,
+ subjectPublicKeyInfo SubjectPublicKeyInfo,
+ ... ,
+ [[2: -- If present, version MUST be v2
+ issuerUniqueID [1] IMPLICIT UniqueIdentifier OPTIONAL,
+ subjectUniqueID [2] IMPLICIT UniqueIdentifier OPTIONAL
+ ]],
+ [[3: -- If present, version MUST be v3 --
+ extensions [3] Extensions{{CertExtensions}} OPTIONAL
+ ]], ... }
+
+ Version ::= INTEGER { v1(0), v2(1), v3(2) }
+
+ CertificateSerialNumber ::= INTEGER
+
+ Validity ::= SEQUENCE {
+ notBefore Time,
+ notAfter Time }
+
+ Time ::= CHOICE {
+ utcTime UTCTime,
+ generalTime GeneralizedTime }
+
+ UniqueIdentifier ::= BIT STRING
+
+ SubjectPublicKeyInfo ::= SEQUENCE {
+ algorithm AlgorithmIdentifier{PUBLIC-KEY,
+ {PublicKeyAlgorithms}},
+ subjectPublicKey BIT STRING }
+
+ -- CRL structures
+
+ CertificateList ::= SIGNED{TBSCertList}
+
+ TBSCertList ::= SEQUENCE {
+ version Version OPTIONAL,
+ -- if present, MUST be v2
+ signature AlgorithmIdentifier{SIGNATURE-ALGORITHM,
+ {SignatureAlgorithms}},
+ issuer Name,
+ thisUpdate Time,
+ nextUpdate Time OPTIONAL,
+ revokedCertificates SEQUENCE SIZE (1..MAX) OF SEQUENCE {
+ userCertificate CertificateSerialNumber,
+ revocationDate Time,
+ ... ,
+ [[2: -- if present, version MUST be v2
+ crlEntryExtensions Extensions{{CrlEntryExtensions}}
+ OPTIONAL
+ ]], ...
+ } OPTIONAL,
+ ... ,
+ [[2: -- if present, version MUST be v2
+ crlExtensions [0] Extensions{{CrlExtensions}}
+ OPTIONAL
+ ]], ... }
+
+ -- Version, Time, CertificateSerialNumber, and Extensions were
+ -- defined earlier for use in the certificate structure
+
+ --
+ -- The two object sets below should be expanded to include
+ -- those algorithms which are supported by the system.
+ --
+ -- For example:
+ -- SignatureAlgorithms SIGNATURE-ALGORITHM ::= {
+ -- PKIXAlgs-2008.SignatureAlgs, ...,
+ -- - - RFC 3279 provides the base set
+ -- PKIX1-PSS-OAEP-ALGORITHMS.SignatureAlgs |
+ -- - - RFC 4055 provides extension algs
+ -- OtherModule.SignatureAlgs
+ -- - - RFC XXXX provides additional extension algs
+ -- }
+
+ SignatureAlgorithms SIGNATURE-ALGORITHM ::= {
+ PKIXAlgs-2009.SignatureAlgs, ...,
+ PKIX1-PSS-OAEP-Algorithms-2009.SignatureAlgs }
+
+ PublicKeyAlgorithms PUBLIC-KEY ::= {
+ PKIXAlgs-2009.PublicKeys, ...,
+ PKIX1-PSS-OAEP-Algorithms-2009.PublicKeys}
+
+ -- Upper Bounds
+
+ ub-state-name INTEGER ::= 128
+ ub-organization-name INTEGER ::= 64
+ ub-organizational-unit-name INTEGER ::= 64
+ ub-title INTEGER ::= 64
+ ub-serial-number INTEGER ::= 64
+ ub-pseudonym INTEGER ::= 128
+ ub-emailaddress-length INTEGER ::= 255
+ ub-locality-name INTEGER ::= 128
+ ub-common-name INTEGER ::= 64
+ ub-name INTEGER ::= 32768
+
+ -- Note - upper bounds on string types, such as TeletexString, are
+ -- measured in characters. Excepting PrintableString or IA5String, a
+ -- significantly greater number of octets will be required to hold
+ -- such a value. As a minimum, 16 octets or twice the specified
+ -- upper bound, whichever is the larger, should be allowed for
+ -- TeletexString. For UTF8String or UniversalString, at least four
+ -- times the upper bound should be allowed.
+
+ -- Information object classes used in the definition
+ -- of certificates and CRLs
+
+ -- Parameterized Type SIGNED
+ --
+ -- Three different ways of defining SIGNED:
+ -- 1. Simple and close to the previous version
+ --
+ -- SIGNED{ToBeSigned} ::= SEQUENCE {
+ -- toBeSigned ToBeSigned,
+ -- algorithm AlgorithmIdentifier{SIGNATURE-ALGORITHM,
+ -- {SignatureAlgorithms}},
+ -- signature BIT STRING
+ -- }
+
+ -- 2. From Authenticated Framework
+ --
+ -- SIGNED{ToBeSigned} ::= SEQUENCE {
+ -- toBeSigned ToBeSigned,
+ -- COMPONENTS OF SIGNATURE{ToBeSigned}
+ -- }
+ -- SIGNATURE{ToBeSigned} ::= SEQUENCE {
+ -- algorithmIdentifier AlgorithmIdentifier,
+ -- encrypted ENCRYPTED-HASH{ToBeSigned}
+ -- }
+ -- ENCRYPTED-HASH{ToBeSigned} ::=
+ -- BIT STRING
+ -- (CONSTRAINED BY {
+ -- shall be the result of applying a hashing procedure to
+ -- the DER-encoded (see 4.1) octets of a value of
+ -- ToBeSigned and then applying an encipherment procedure
+ -- to those octets
+ -- })
+ --
+ --
+ -- 3. A more complex version, but one that automatically ties
+ -- together both the signature algorithm and the
+ -- signature value for automatic decoding.
+ --
+ SIGNED{ToBeSigned} ::= SEQUENCE {
+ toBeSigned ToBeSigned,
+ algorithmIdentifier SEQUENCE {
+ algorithm SIGNATURE-ALGORITHM.
+ &id({SignatureAlgorithms}),
+ parameters SIGNATURE-ALGORITHM.
+ &Params({SignatureAlgorithms}
+ {@algorithmIdentifier.algorithm}) OPTIONAL
+ },
+ signature BIT STRING (CONTAINING SIGNATURE-ALGORITHM.&Value(
+ {SignatureAlgorithms}
+ {@algorithmIdentifier.algorithm}))
+ }
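+
+ -- For example, earlier in this module
+ --
+ --     Certificate ::= SIGNED{TBSCertificate}
+ --
+ -- instantiates this type, so decoding a Certificate automatically
+ -- ties the signature algorithm to the type of the signature value.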
+
+ END
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1Implicit-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1Implicit-2009.asn1
new file mode 100644
index 0000000000..3651a5249b
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1Implicit-2009.asn1
@@ -0,0 +1,447 @@
+ PKIX1Implicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-implicit-02(59)}
+ DEFINITIONS IMPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ AttributeSet{}, EXTENSION, ATTRIBUTE
+ FROM PKIX-CommonTypes-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57) }
+
+ id-pe, id-kp, id-qt-unotice, id-qt-cps, ORAddress, Name,
+ RelativeDistinguishedName, CertificateSerialNumber,
+ DirectoryString{}, SupportedAttributes
+ FROM PKIX1Explicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-explicit-02(51) };
+
+ CertExtensions EXTENSION ::= {
+ ext-AuthorityKeyIdentifier | ext-SubjectKeyIdentifier |
+ ext-KeyUsage | ext-PrivateKeyUsagePeriod |
+ ext-CertificatePolicies | ext-PolicyMappings |
+ ext-SubjectAltName | ext-IssuerAltName |
+ ext-SubjectDirectoryAttributes |
+ ext-BasicConstraints | ext-NameConstraints |
+ ext-PolicyConstraints | ext-ExtKeyUsage |
+ ext-CRLDistributionPoints | ext-InhibitAnyPolicy |
+ ext-FreshestCRL | ext-AuthorityInfoAccess |
+ ext-SubjectInfoAccessSyntax, ... }
+
+ CrlExtensions EXTENSION ::= {
+ ext-AuthorityKeyIdentifier | ext-IssuerAltName |
+ ext-CRLNumber | ext-DeltaCRLIndicator |
+ ext-IssuingDistributionPoint | ext-FreshestCRL, ... }
+
+ CrlEntryExtensions EXTENSION ::= {
+ ext-CRLReason | ext-CertificateIssuer |
+ ext-HoldInstructionCode | ext-InvalidityDate, ... }
+ -- Shared arc for standard certificate and CRL extensions
+
+ id-ce OBJECT IDENTIFIER ::= { joint-iso-ccitt(2) ds(5) 29 }
+
+ -- authority key identifier OID and syntax
+
+ ext-AuthorityKeyIdentifier EXTENSION ::= { SYNTAX
+ AuthorityKeyIdentifier IDENTIFIED BY
+ id-ce-authorityKeyIdentifier }
+ id-ce-authorityKeyIdentifier OBJECT IDENTIFIER ::= { id-ce 35 }
+
+ AuthorityKeyIdentifier ::= SEQUENCE {
+ keyIdentifier [0] KeyIdentifier OPTIONAL,
+ authorityCertIssuer [1] GeneralNames OPTIONAL,
+ authorityCertSerialNumber [2] CertificateSerialNumber OPTIONAL }
+ (WITH COMPONENTS {
+ ...,
+ authorityCertIssuer PRESENT,
+ authorityCertSerialNumber PRESENT
+ } |
+ WITH COMPONENTS {
+ ...,
+ authorityCertIssuer ABSENT,
+ authorityCertSerialNumber ABSENT
+ })
+
+ KeyIdentifier ::= OCTET STRING
+
+ -- subject key identifier OID and syntax
+
+ ext-SubjectKeyIdentifier EXTENSION ::= { SYNTAX
+ KeyIdentifier IDENTIFIED BY id-ce-subjectKeyIdentifier }
+ id-ce-subjectKeyIdentifier OBJECT IDENTIFIER ::= { id-ce 14 }
+
+ -- key usage extension OID and syntax
+
+ ext-KeyUsage EXTENSION ::= { SYNTAX
+ KeyUsage IDENTIFIED BY id-ce-keyUsage }
+ id-ce-keyUsage OBJECT IDENTIFIER ::= { id-ce 15 }
+
+ KeyUsage ::= BIT STRING {
+ digitalSignature (0),
+ nonRepudiation (1), -- recent editions of X.509 have
+ -- renamed this bit to
+ -- contentCommitment
+ keyEncipherment (2),
+ dataEncipherment (3),
+ keyAgreement (4),
+ keyCertSign (5),
+ cRLSign (6),
+ encipherOnly (7),
+ decipherOnly (8)
+ }
+
+ -- private key usage period extension OID and syntax
+
+ ext-PrivateKeyUsagePeriod EXTENSION ::= { SYNTAX
+ PrivateKeyUsagePeriod IDENTIFIED BY id-ce-privateKeyUsagePeriod }
+ id-ce-privateKeyUsagePeriod OBJECT IDENTIFIER ::= { id-ce 16 }
+
+ PrivateKeyUsagePeriod ::= SEQUENCE {
+ notBefore [0] GeneralizedTime OPTIONAL,
+ notAfter [1] GeneralizedTime OPTIONAL }
+ (WITH COMPONENTS {..., notBefore PRESENT } |
+ WITH COMPONENTS {..., notAfter PRESENT })
+
+ -- certificate policies extension OID and syntax
+
+ ext-CertificatePolicies EXTENSION ::= { SYNTAX
+ CertificatePolicies IDENTIFIED BY id-ce-certificatePolicies}
+ id-ce-certificatePolicies OBJECT IDENTIFIER ::= { id-ce 32 }
+
+ CertificatePolicies ::= SEQUENCE SIZE (1..MAX) OF PolicyInformation
+
+ PolicyInformation ::= SEQUENCE {
+ policyIdentifier CertPolicyId,
+ policyQualifiers SEQUENCE SIZE (1..MAX) OF
+ PolicyQualifierInfo OPTIONAL }
+
+ CertPolicyId ::= OBJECT IDENTIFIER
+
+ CERT-POLICY-QUALIFIER ::= TYPE-IDENTIFIER
+
+ PolicyQualifierInfo ::= SEQUENCE {
+ policyQualifierId CERT-POLICY-QUALIFIER.
+ &id({PolicyQualifierId}),
+ qualifier CERT-POLICY-QUALIFIER.
+ &Type({PolicyQualifierId}{@policyQualifierId})}
+
+ -- Implementations that recognize additional policy qualifiers MUST
+ -- augment the following definition for PolicyQualifierId
+
+ PolicyQualifierId CERT-POLICY-QUALIFIER ::=
+ { pqid-cps | pqid-unotice, ... }
+
+ pqid-cps CERT-POLICY-QUALIFIER ::= { CPSuri IDENTIFIED BY id-qt-cps }
+ pqid-unotice CERT-POLICY-QUALIFIER ::= { UserNotice
+ IDENTIFIED BY id-qt-unotice }
+
+ -- CPS pointer qualifier
+
+ CPSuri ::= IA5String
+
+ -- user notice qualifier
+
+ UserNotice ::= SEQUENCE {
+ noticeRef NoticeReference OPTIONAL,
+ explicitText DisplayText OPTIONAL}
+
+ --
+ -- This is not made explicit in the text
+ --
+ -- {WITH COMPONENTS {..., noticeRef PRESENT} |
+ -- WITH COMPONENTS {..., DisplayText PRESENT }}
+
+ NoticeReference ::= SEQUENCE {
+ organization DisplayText,
+ noticeNumbers SEQUENCE OF INTEGER }
+
+ DisplayText ::= CHOICE {
+ ia5String IA5String (SIZE (1..200)),
+ visibleString VisibleString (SIZE (1..200)),
+ bmpString BMPString (SIZE (1..200)),
+ utf8String UTF8String (SIZE (1..200)) }
+
+ -- policy mapping extension OID and syntax
+
+ ext-PolicyMappings EXTENSION ::= { SYNTAX
+ PolicyMappings IDENTIFIED BY id-ce-policyMappings }
+ id-ce-policyMappings OBJECT IDENTIFIER ::= { id-ce 33 }
+
+ PolicyMappings ::= SEQUENCE SIZE (1..MAX) OF SEQUENCE {
+ issuerDomainPolicy CertPolicyId,
+ subjectDomainPolicy CertPolicyId
+ }
+
+ -- subject alternative name extension OID and syntax
+
+ ext-SubjectAltName EXTENSION ::= { SYNTAX
+ GeneralNames IDENTIFIED BY id-ce-subjectAltName }
+ id-ce-subjectAltName OBJECT IDENTIFIER ::= { id-ce 17 }
+
+ GeneralNames ::= SEQUENCE SIZE (1..MAX) OF GeneralName
+
+ GeneralName ::= CHOICE {
+ otherName [0] INSTANCE OF OTHER-NAME,
+ rfc822Name [1] IA5String,
+ dNSName [2] IA5String,
+ x400Address [3] ORAddress,
+ directoryName [4] Name,
+ ediPartyName [5] EDIPartyName,
+ uniformResourceIdentifier [6] IA5String,
+ iPAddress [7] OCTET STRING,
+ registeredID [8] OBJECT IDENTIFIER
+ }
+
+ -- AnotherName replaces OTHER-NAME ::= TYPE-IDENTIFIER, as
+ -- TYPE-IDENTIFIER is not supported in the '88 ASN.1 syntax
+
+ OTHER-NAME ::= TYPE-IDENTIFIER
+
+ EDIPartyName ::= SEQUENCE {
+ nameAssigner [0] DirectoryString {ubMax} OPTIONAL,
+ partyName [1] DirectoryString {ubMax}
+ }
+
+ -- issuer alternative name extension OID and syntax
+
+ ext-IssuerAltName EXTENSION ::= { SYNTAX
+ GeneralNames IDENTIFIED BY id-ce-issuerAltName }
+ id-ce-issuerAltName OBJECT IDENTIFIER ::= { id-ce 18 }
+
+ ext-SubjectDirectoryAttributes EXTENSION ::= { SYNTAX
+ SubjectDirectoryAttributes IDENTIFIED BY
+ id-ce-subjectDirectoryAttributes }
+ id-ce-subjectDirectoryAttributes OBJECT IDENTIFIER ::= { id-ce 9 }
+
+ SubjectDirectoryAttributes ::= SEQUENCE SIZE (1..MAX) OF
+ AttributeSet{{SupportedAttributes}}
+
+ -- basic constraints extension OID and syntax
+
+ ext-BasicConstraints EXTENSION ::= { SYNTAX
+ BasicConstraints IDENTIFIED BY id-ce-basicConstraints }
+ id-ce-basicConstraints OBJECT IDENTIFIER ::= { id-ce 19 }
+
+ BasicConstraints ::= SEQUENCE {
+ cA BOOLEAN DEFAULT FALSE,
+ pathLenConstraint INTEGER (0..MAX) OPTIONAL
+ }
+
+ -- name constraints extension OID and syntax
+ ext-NameConstraints EXTENSION ::= { SYNTAX
+ NameConstraints IDENTIFIED BY id-ce-nameConstraints }
+ id-ce-nameConstraints OBJECT IDENTIFIER ::= { id-ce 30 }
+
+ NameConstraints ::= SEQUENCE {
+ permittedSubtrees [0] GeneralSubtrees OPTIONAL,
+ excludedSubtrees [1] GeneralSubtrees OPTIONAL
+ }
+ --
+ -- This is a constraint on certificates issued by CAs, but is
+ -- not a requirement on EEs.
+ --
+ -- (WITH COMPONENTS { ..., permittedSubtrees PRESENT} |
+ -- WITH COMPONENTS { ..., excludedSubtrees PRESENT }}
+
+ GeneralSubtrees ::= SEQUENCE SIZE (1..MAX) OF GeneralSubtree
+
+ GeneralSubtree ::= SEQUENCE {
+ base GeneralName,
+ minimum [0] BaseDistance DEFAULT 0,
+ maximum [1] BaseDistance OPTIONAL
+ }
+
+ BaseDistance ::= INTEGER (0..MAX)
+
+ -- policy constraints extension OID and syntax
+
+ ext-PolicyConstraints EXTENSION ::= { SYNTAX
+ PolicyConstraints IDENTIFIED BY id-ce-policyConstraints }
+ id-ce-policyConstraints OBJECT IDENTIFIER ::= { id-ce 36 }
+
+ PolicyConstraints ::= SEQUENCE {
+ requireExplicitPolicy [0] SkipCerts OPTIONAL,
+ inhibitPolicyMapping [1] SkipCerts OPTIONAL }
+ --
+ -- This is a constraint on certificates issued by CAs,
+ -- but is not a requirement for EEs
+ --
+ -- (WITH COMPONENTS { ..., requireExplicitPolicy PRESENT} |
+ -- WITH COMPONENTS { ..., inhibitPolicyMapping PRESENT})
+
+ SkipCerts ::= INTEGER (0..MAX)
+
+ -- CRL distribution points extension OID and syntax
+
+ ext-CRLDistributionPoints EXTENSION ::= { SYNTAX
+ CRLDistributionPoints IDENTIFIED BY id-ce-cRLDistributionPoints}
+ id-ce-cRLDistributionPoints OBJECT IDENTIFIER ::= {id-ce 31}
+ CRLDistributionPoints ::= SEQUENCE SIZE (1..MAX) OF DistributionPoint
+
+ DistributionPoint ::= SEQUENCE {
+ distributionPoint [0] DistributionPointName OPTIONAL,
+ reasons [1] ReasonFlags OPTIONAL,
+ cRLIssuer [2] GeneralNames OPTIONAL
+ }
+ --
+ -- This is not a requirement in the text, but it seems as if it
+ -- should be
+ --
+ --(WITH COMPONENTS {..., distributionPoint PRESENT} |
+ -- WITH COMPONENTS {..., cRLIssuer PRESENT})
+
+ DistributionPointName ::= CHOICE {
+ fullName [0] GeneralNames,
+ nameRelativeToCRLIssuer [1] RelativeDistinguishedName
+ }
+
+ ReasonFlags ::= BIT STRING {
+ unused (0),
+ keyCompromise (1),
+ cACompromise (2),
+ affiliationChanged (3),
+ superseded (4),
+ cessationOfOperation (5),
+ certificateHold (6),
+ privilegeWithdrawn (7),
+ aACompromise (8)
+ }
+
+ -- extended key usage extension OID and syntax
+
+ ext-ExtKeyUsage EXTENSION ::= { SYNTAX
+ ExtKeyUsageSyntax IDENTIFIED BY id-ce-extKeyUsage }
+ id-ce-extKeyUsage OBJECT IDENTIFIER ::= {id-ce 37}
+
+ ExtKeyUsageSyntax ::= SEQUENCE SIZE (1..MAX) OF KeyPurposeId
+
+ KeyPurposeId ::= OBJECT IDENTIFIER
+
+ -- permit unspecified key uses
+
+ anyExtendedKeyUsage OBJECT IDENTIFIER ::= { id-ce-extKeyUsage 0 }
+
+ -- extended key purpose OIDs
+
+ id-kp-serverAuth OBJECT IDENTIFIER ::= { id-kp 1 }
+ id-kp-clientAuth OBJECT IDENTIFIER ::= { id-kp 2 }
+ id-kp-codeSigning OBJECT IDENTIFIER ::= { id-kp 3 }
+ id-kp-emailProtection OBJECT IDENTIFIER ::= { id-kp 4 }
+ id-kp-timeStamping OBJECT IDENTIFIER ::= { id-kp 8 }
+ id-kp-OCSPSigning OBJECT IDENTIFIER ::= { id-kp 9 }
+
+ -- inhibit any policy OID and syntax
+
+ ext-InhibitAnyPolicy EXTENSION ::= {SYNTAX
+ SkipCerts IDENTIFIED BY id-ce-inhibitAnyPolicy }
+ id-ce-inhibitAnyPolicy OBJECT IDENTIFIER ::= { id-ce 54 }
+
+ -- freshest (delta)CRL extension OID and syntax
+
+ ext-FreshestCRL EXTENSION ::= {SYNTAX
+ CRLDistributionPoints IDENTIFIED BY id-ce-freshestCRL }
+ id-ce-freshestCRL OBJECT IDENTIFIER ::= { id-ce 46 }
+
+ -- authority info access
+
+ ext-AuthorityInfoAccess EXTENSION ::= { SYNTAX
+ AuthorityInfoAccessSyntax IDENTIFIED BY
+ id-pe-authorityInfoAccess }
+ id-pe-authorityInfoAccess OBJECT IDENTIFIER ::= { id-pe 1 }
+
+ AuthorityInfoAccessSyntax ::=
+ SEQUENCE SIZE (1..MAX) OF AccessDescription
+
+ AccessDescription ::= SEQUENCE {
+ accessMethod OBJECT IDENTIFIER,
+ accessLocation GeneralName }
+
+ -- subject info access
+
+ ext-SubjectInfoAccessSyntax EXTENSION ::= { SYNTAX
+ SubjectInfoAccessSyntax IDENTIFIED BY id-pe-subjectInfoAccess }
+ id-pe-subjectInfoAccess OBJECT IDENTIFIER ::= { id-pe 11 }
+
+ SubjectInfoAccessSyntax ::=
+ SEQUENCE SIZE (1..MAX) OF AccessDescription
+
+ -- CRL number extension OID and syntax
+
+ ext-CRLNumber EXTENSION ::= {SYNTAX
+ INTEGER (0..MAX) IDENTIFIED BY id-ce-cRLNumber }
+ id-ce-cRLNumber OBJECT IDENTIFIER ::= { id-ce 20 }
+
+ CRLNumber ::= INTEGER (0..MAX)
+ -- issuing distribution point extension OID and syntax
+
+ ext-IssuingDistributionPoint EXTENSION ::= { SYNTAX
+ IssuingDistributionPoint IDENTIFIED BY
+ id-ce-issuingDistributionPoint }
+ id-ce-issuingDistributionPoint OBJECT IDENTIFIER ::= { id-ce 28 }
+
+ IssuingDistributionPoint ::= SEQUENCE {
+ distributionPoint [0] DistributionPointName OPTIONAL,
+ onlyContainsUserCerts [1] BOOLEAN DEFAULT FALSE,
+ onlyContainsCACerts [2] BOOLEAN DEFAULT FALSE,
+ onlySomeReasons [3] ReasonFlags OPTIONAL,
+ indirectCRL [4] BOOLEAN DEFAULT FALSE,
+ onlyContainsAttributeCerts [5] BOOLEAN DEFAULT FALSE
+ }
+ -- at most one of onlyContainsUserCerts, onlyContainsCACerts,
+ -- or onlyContainsAttributeCerts may be set to TRUE.
+
+ ext-DeltaCRLIndicator EXTENSION ::= { SYNTAX
+ CRLNumber IDENTIFIED BY id-ce-deltaCRLIndicator }
+ id-ce-deltaCRLIndicator OBJECT IDENTIFIER ::= { id-ce 27 }
+
+ -- CRL reasons extension OID and syntax
+
+ ext-CRLReason EXTENSION ::= { SYNTAX
+ CRLReason IDENTIFIED BY id-ce-cRLReasons }
+ id-ce-cRLReasons OBJECT IDENTIFIER ::= { id-ce 21 }
+
+ CRLReason ::= ENUMERATED {
+ unspecified (0),
+ keyCompromise (1),
+ cACompromise (2),
+ affiliationChanged (3),
+ superseded (4),
+ cessationOfOperation (5),
+ certificateHold (6),
+ removeFromCRL (8),
+ privilegeWithdrawn (9),
+ aACompromise (10)
+ }
+
+ -- certificate issuer CRL entry extension OID and syntax
+
+ ext-CertificateIssuer EXTENSION ::= { SYNTAX
+ GeneralNames IDENTIFIED BY id-ce-certificateIssuer }
+ id-ce-certificateIssuer OBJECT IDENTIFIER ::= { id-ce 29 }
+
+ -- hold instruction extension OID and syntax
+ ext-HoldInstructionCode EXTENSION ::= { SYNTAX
+ OBJECT IDENTIFIER IDENTIFIED BY id-ce-holdInstructionCode }
+ id-ce-holdInstructionCode OBJECT IDENTIFIER ::= { id-ce 23 }
+
+ -- ANSI x9 holdinstructions
+
+ holdInstruction OBJECT IDENTIFIER ::=
+ {joint-iso-itu-t(2) member-body(2) us(840) x9cm(10040) 2}
+ id-holdinstruction-none OBJECT IDENTIFIER ::=
+ {holdInstruction 1} -- deprecated
+ id-holdinstruction-callissuer OBJECT IDENTIFIER ::=
+ {holdInstruction 2}
+ id-holdinstruction-reject OBJECT IDENTIFIER ::=
+ {holdInstruction 3}
+
+ -- invalidity date CRL entry extension OID and syntax
+
+ ext-InvalidityDate EXTENSION ::= { SYNTAX
+ GeneralizedTime IDENTIFIED BY id-ce-invalidityDate }
+ id-ce-invalidityDate OBJECT IDENTIFIER ::= { id-ce 24 }
+ -- Upper bounds
+ ubMax INTEGER ::= 32768
+
+ END
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXAlgs-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXAlgs-2009.asn1
new file mode 100644
index 0000000000..d58bcb5b19
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXAlgs-2009.asn1
@@ -0,0 +1,528 @@
+ PKIXAlgs-2009 { iso(1) identified-organization(3) dod(6)
+ internet(1) security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-algorithms2008-02(56) }
+
+ DEFINITIONS EXPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ PUBLIC-KEY, SIGNATURE-ALGORITHM, DIGEST-ALGORITHM, SMIME-CAPS
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ mda-sha224, mda-sha256, mda-sha384, mda-sha512
+ FROM PKIX1-PSS-OAEP-Algorithms-2009
+ {iso(1) identified-organization(3) dod(6) internet(1)
+ security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-rsa-pkalgs-02(54)} ;
+
+ --
+ -- Public Key (pk-) Algorithms
+ --
+
+ PublicKeys PUBLIC-KEY ::= {
+ pk-rsa |
+ pk-dsa |
+ pk-dh |
+ pk-kea,
+ ...,
+ pk-ec |
+ pk-ecDH |
+ pk-ecMQV
+ }
+
+ --
+ -- Signature Algorithms (sa-)
+ --
+
+ SignatureAlgs SIGNATURE-ALGORITHM ::= {
+ sa-rsaWithMD2 |
+ sa-rsaWithMD5 |
+ sa-rsaWithSHA1 |
+ sa-dsaWithSHA1 |
+ sa-ecdsaWithSHA1,
+ ..., -- Extensible
+ sa-dsaWithSHA224 |
+ sa-dsaWithSHA256 |
+ sa-ecdsaWithSHA224 |
+ sa-ecdsaWithSHA256 |
+ sa-ecdsaWithSHA384 |
+ sa-ecdsaWithSHA512
+ }
+
+ --
+ -- S/MIME CAPS for algorithms in this document
+ --
+ -- For all of the algorithms laid out in this document, the
+ -- parameters field for the S/MIME capabilities is defined as
+ -- ABSENT as there are no specific values that need to be known
+ -- by the receiver for negotiation.
+
+ --
+
+ SMimeCaps SMIME-CAPS ::= {
+ sa-rsaWithMD2.&smimeCaps |
+ sa-rsaWithMD5.&smimeCaps |
+ sa-rsaWithSHA1.&smimeCaps |
+ sa-dsaWithSHA1.&smimeCaps |
+ sa-dsaWithSHA224.&smimeCaps |
+ sa-dsaWithSHA256.&smimeCaps |
+ sa-ecdsaWithSHA1.&smimeCaps |
+ sa-ecdsaWithSHA224.&smimeCaps |
+ sa-ecdsaWithSHA256.&smimeCaps |
+ sa-ecdsaWithSHA384.&smimeCaps |
+ sa-ecdsaWithSHA512.&smimeCaps,
+ ... }
+
+ -- RSA PK Algorithm, Parameters, and Keys
+
+ pk-rsa PUBLIC-KEY ::= {
+ IDENTIFIER rsaEncryption
+ KEY RSAPublicKey
+ PARAMS TYPE NULL ARE absent
+ -- Private key format not in this module --
+ CERT-KEY-USAGE {digitalSignature, nonRepudiation,
+ keyEncipherment, dataEncipherment, keyCertSign, cRLSign}
+ }
+
+ rsaEncryption OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1)
+ pkcs-1(1) 1 }
+
+ RSAPublicKey ::= SEQUENCE {
+ modulus INTEGER, -- n
+ publicExponent INTEGER -- e
+ }
+
+ -- DSA PK Algorithm, Parameters, and Keys
+
+ pk-dsa PUBLIC-KEY ::= {
+ IDENTIFIER id-dsa
+ KEY DSAPublicKey
+ PARAMS TYPE DSA-Params ARE inheritable
+ -- Private key format not in this module --
+ CERT-KEY-USAGE { digitalSignature, nonRepudiation, keyCertSign,
+ cRLSign }
+ }
+
+ id-dsa OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) x9-57(10040) x9algorithm(4) 1 }
+
+ DSA-Params ::= SEQUENCE {
+ p INTEGER,
+ q INTEGER,
+ g INTEGER
+ }
+
+ DSAPublicKey ::= INTEGER -- public key, y
+
+ -- Diffie-Hellman PK Algorithm, Parameters, and Keys
+
+ pk-dh PUBLIC-KEY ::= {
+ IDENTIFIER dhpublicnumber
+ KEY DHPublicKey
+ PARAMS TYPE DomainParameters ARE inheritable
+ -- Private key format not in this module --
+ CERT-KEY-USAGE {keyAgreement, encipherOnly, decipherOnly }
+ }
+
+ dhpublicnumber OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) ansi-x942(10046)
+ number-type(2) 1 }
+
+ DomainParameters ::= SEQUENCE {
+ p INTEGER, -- odd prime, p=jq +1
+ g INTEGER, -- generator, g
+ q INTEGER, -- factor of p-1
+ j INTEGER OPTIONAL, -- subgroup factor, j>= 2
+ validationParams ValidationParams OPTIONAL
+ }
+
+ ValidationParams ::= SEQUENCE {
+ seed BIT STRING,
+ pgenCounter INTEGER
+ }
+
+ DiffieHellmanPublicNumber ::= INTEGER -- according to http://wikisec.free.fr/crypto/crypto.html
+
+ DHPublicKey ::= INTEGER -- public key, y = g^x mod p
+
+ -- KEA PK Algorithm and Parameters
+
+ pk-kea PUBLIC-KEY ::= {
+ IDENTIFIER id-keyExchangeAlgorithm
+ -- key is not encoded --
+ PARAMS TYPE KEA-Params-Id ARE required
+ -- Private key format not in this module --
+ CERT-KEY-USAGE {keyAgreement, encipherOnly, decipherOnly }
+ }
+ id-keyExchangeAlgorithm OBJECT IDENTIFIER ::= {
+ joint-iso-itu-t(2) country(16) us(840) organization(1)
+ gov(101) dod(2) infosec(1) algorithms(1) 22 }
+
+ KEA-Params-Id ::= OCTET STRING
+
+ -- Elliptic Curve (EC) Signatures: Unrestricted Algorithms
+ -- (Section 2.1.1 of RFC 5480)
+ --
+ -- EC Unrestricted Algorithm ID -- -- this is used for ECDSA
+
+ pk-ec PUBLIC-KEY ::= {
+ IDENTIFIER id-ecPublicKey
+ KEY ECPoint
+ PARAMS TYPE ECParameters ARE required
+ -- Private key format not in this module --
+ CERT-KEY-USAGE { digitalSignature, nonRepudiation, keyAgreement,
+ keyCertSign, cRLSign }
+ }
+
+ ECPoint ::= OCTET STRING -- see RFC 5480 for syntax and restrictions
+
+ id-ecPublicKey OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) ansi-X9-62(10045) keyType(2) 1 }
+
+ -- Elliptic Curve (EC) Signatures: Restricted Algorithms
+ -- (Section 2.1.2 of RFC 5480)
+ --
+ -- EC Diffie-Hellman Algorithm ID
+
+ pk-ecDH PUBLIC-KEY ::= {
+ IDENTIFIER id-ecDH
+ KEY ECPoint
+ PARAMS TYPE ECParameters ARE required
+ -- Private key format not in this module --
+ CERT-KEY-USAGE { keyAgreement, encipherOnly, decipherOnly }
+ }
+
+ id-ecDH OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) schemes(1)
+ ecdh(12) }
+
+ -- EC Menezes-Qu-Vanstone Algorithm ID
+
+ pk-ecMQV PUBLIC-KEY ::= {
+ IDENTIFIER id-ecMQV
+ KEY ECPoint
+ PARAMS TYPE ECParameters ARE required
+ -- Private key format not in this module --
+ CERT-KEY-USAGE { keyAgreement, encipherOnly, decipherOnly }
+ }
+
+ id-ecMQV OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) schemes(1)
+ ecmqv(13) }
+
+ -- Parameters and Keys for both Restricted and Unrestricted EC
+
+ ECParameters ::= CHOICE {
+ namedCurve CURVE.&id({NamedCurve})
+ -- implicitCurve NULL
+ -- implicitCurve MUST NOT be used in PKIX
+ -- specifiedCurve SpecifiedCurve
+ -- specifiedCurve MUST NOT be used in PKIX
+ -- Details for specifiedCurve can be found in [X9.62]
+ -- Any future additions to this CHOICE should be coordinated
+ -- with ANSI X.9.
+ }
+ -- If you need to be able to decode ANSI X.9 parameter structures,
+ -- uncomment the implicitCurve and specifiedCurve above, and also
+ -- uncomment the following:
+ --(WITH COMPONENTS {namedCurve PRESENT})
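+
+ -- For example, a value naming the P-256 curve is written using the
+ -- CHOICE alternative (the value name is a placeholder; secp256r1 is
+ -- defined later in this module):
+ --
+ --     ecparams-p256 ECParameters ::= namedCurve : secp256r1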
+
+ -- Sec 2.1.1.1 Named Curve
+
+ CURVE ::= CLASS { &id OBJECT IDENTIFIER UNIQUE }
+ WITH SYNTAX { ID &id }
+
+ NamedCurve CURVE ::= {
+ { ID secp192r1 } | { ID sect163k1 } | { ID sect163r2 } |
+ { ID secp224r1 } | { ID sect233k1 } | { ID sect233r1 } |
+ { ID secp256r1 } | { ID sect283k1 } | { ID sect283r1 } |
+ { ID secp384r1 } | { ID sect409k1 } | { ID sect409r1 } |
+ { ID secp521r1 } | { ID sect571k1 } | { ID sect571r1 },
+ ... -- Extensible
+ }
+
+ -- Note in [X9.62] the curves are referred to as 'ansiX9' as
+ -- opposed to 'sec'. For example, secp192r1 is the same curve as
+ -- ansix9p192r1.
+
+ -- Note that in [PKI-ALG] the secp192r1 curve was referred to as
+ -- prime192v1 and the secp256r1 curve was referred to as
+ -- prime256v1.
+
+ -- Note that [FIPS186-3] refers to secp192r1 as P-192,
+ -- secp224r1 as P-224, secp256r1 as P-256, secp384r1 as P-384,
+ -- and secp521r1 as P-521.
+
+ secp192r1 OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) ansi-X9-62(10045) curves(3)
+ prime(1) 1 }
+
+ sect163k1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 1 }
+
+ sect163r2 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 15 }
+
+ secp224r1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 33 }
+
+ sect233k1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 26 }
+
+ sect233r1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 27 }
+
+ secp256r1 OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) ansi-X9-62(10045) curves(3)
+ prime(1) 7 }
+
+ sect283k1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 16 }
+
+ sect283r1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 17 }
+
+ secp384r1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 34 }
+
+ sect409k1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 36 }
+
+ sect409r1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 37 }
+
+ secp521r1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 35 }
+
+ sect571k1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 38 }
+
+ sect571r1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 39 }
+
+ -- RSA with MD-2
+
+ sa-rsaWithMD2 SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER md2WithRSAEncryption
+ PARAMS TYPE NULL ARE required
+ HASHES { mda-md2 }
+ PUBLIC-KEYS { pk-rsa }
+ SMIME-CAPS { IDENTIFIED BY md2WithRSAEncryption }
+ }
+
+ md2WithRSAEncryption OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1)
+ pkcs-1(1) 2 }
+
+ -- RSA with MD-5
+
+ sa-rsaWithMD5 SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER md5WithRSAEncryption
+ PARAMS TYPE NULL ARE required
+ HASHES { mda-md5 }
+ PUBLIC-KEYS { pk-rsa }
+ SMIME-CAPS { IDENTIFIED BY md5WithRSAEncryption }
+ }
+
+ md5WithRSAEncryption OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1)
+ pkcs-1(1) 4 }
+
+ -- RSA with SHA-1
+
+ sa-rsaWithSHA1 SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER sha1WithRSAEncryption
+ PARAMS TYPE NULL ARE required
+ HASHES { mda-sha1 }
+ PUBLIC-KEYS { pk-rsa }
+ SMIME-CAPS {IDENTIFIED BY sha1WithRSAEncryption }
+ }
+
+ sha1WithRSAEncryption OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1)
+ pkcs-1(1) 5 }
+
+ -- DSA with SHA-1
+
+ sa-dsaWithSHA1 SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER dsa-with-sha1
+ VALUE DSA-Sig-Value
+ PARAMS TYPE NULL ARE absent
+ HASHES { mda-sha1 }
+ PUBLIC-KEYS { pk-dsa }
+ SMIME-CAPS { IDENTIFIED BY dsa-with-sha1 }
+ }
+
+ dsa-with-sha1 OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) x9-57(10040) x9algorithm(4) 3 }
+
+ -- DSA with SHA-224
+
+ sa-dsaWithSHA224 SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER dsa-with-sha224
+ VALUE DSA-Sig-Value
+ PARAMS TYPE NULL ARE absent
+ HASHES { mda-sha224 }
+ PUBLIC-KEYS { pk-dsa }
+ SMIME-CAPS { IDENTIFIED BY dsa-with-sha224 }
+ }
+
+ dsa-with-sha224 OBJECT IDENTIFIER ::= {
+ joint-iso-ccitt(2) country(16) us(840) organization(1) gov(101)
+ csor(3) algorithms(4) id-dsa-with-sha2(3) 1 }
+
+ -- DSA with SHA-256
+
+ sa-dsaWithSHA256 SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER dsa-with-sha256
+ VALUE DSA-Sig-Value
+ PARAMS TYPE NULL ARE absent
+ HASHES { mda-sha256 }
+ PUBLIC-KEYS { pk-dsa }
+ SMIME-CAPS { IDENTIFIED BY dsa-with-sha256 }
+ }
+
+ dsa-with-sha256 OBJECT IDENTIFIER ::= {
+ joint-iso-ccitt(2) country(16) us(840) organization(1) gov(101)
+ csor(3) algorithms(4) id-dsa-with-sha2(3) 2 }
+
+ -- ECDSA with SHA-1
+
+ sa-ecdsaWithSHA1 SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER ecdsa-with-SHA1
+ VALUE ECDSA-Sig-Value
+ PARAMS TYPE NULL ARE absent
+ HASHES { mda-sha1 }
+ PUBLIC-KEYS { pk-ec }
+ SMIME-CAPS {IDENTIFIED BY ecdsa-with-SHA1 }
+ }
+
+ ecdsa-with-SHA1 OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) ansi-X9-62(10045)
+ signatures(4) 1 }
+
+ -- ECDSA with SHA-224
+
+ sa-ecdsaWithSHA224 SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER ecdsa-with-SHA224
+ VALUE ECDSA-Sig-Value
+ PARAMS TYPE NULL ARE absent
+ HASHES { mda-sha224 }
+ PUBLIC-KEYS { pk-ec }
+ SMIME-CAPS { IDENTIFIED BY ecdsa-with-SHA224 }
+ }
+
+ ecdsa-with-SHA224 OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) ansi-X9-62(10045) signatures(4)
+ ecdsa-with-SHA2(3) 1 }
+
+ -- ECDSA with SHA-256
+
+ sa-ecdsaWithSHA256 SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER ecdsa-with-SHA256
+ VALUE ECDSA-Sig-Value
+ PARAMS TYPE NULL ARE absent
+ HASHES { mda-sha256 }
+ PUBLIC-KEYS { pk-ec }
+ SMIME-CAPS { IDENTIFIED BY ecdsa-with-SHA256 }
+ }
+
+ ecdsa-with-SHA256 OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) ansi-X9-62(10045) signatures(4)
+ ecdsa-with-SHA2(3) 2 }
+
+ -- ECDSA with SHA-384
+
+ sa-ecdsaWithSHA384 SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER ecdsa-with-SHA384
+ VALUE ECDSA-Sig-Value
+ PARAMS TYPE NULL ARE absent
+ HASHES { mda-sha384 }
+ PUBLIC-KEYS { pk-ec }
+ SMIME-CAPS { IDENTIFIED BY ecdsa-with-SHA384 }
+ }
+ ecdsa-with-SHA384 OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) ansi-X9-62(10045) signatures(4)
+ ecdsa-with-SHA2(3) 3 }
+
+ -- ECDSA with SHA-512
+
+ sa-ecdsaWithSHA512 SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER ecdsa-with-SHA512
+ VALUE ECDSA-Sig-Value
+ PARAMS TYPE NULL ARE absent
+ HASHES { mda-sha512 }
+ PUBLIC-KEYS { pk-ec }
+ SMIME-CAPS { IDENTIFIED BY ecdsa-with-SHA512 }
+ }
+
+ ecdsa-with-SHA512 OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) ansi-X9-62(10045) signatures(4)
+ ecdsa-with-SHA2(3) 4 }
+
+ --
+ -- Signature Values
+ --
+
+ -- DSA
+
+ DSA-Sig-Value ::= SEQUENCE {
+ r INTEGER,
+ s INTEGER
+ }
+
+ -- ECDSA
+
+ ECDSA-Sig-Value ::= SEQUENCE {
+ r INTEGER,
+ s INTEGER
+ }
+
+ --
+ -- Message Digest Algorithms (mda-)
+ --
+
+ HashAlgs DIGEST-ALGORITHM ::= {
+ mda-md2 |
+ mda-md5 |
+ mda-sha1,
+ ... -- Extensible
+ }
+ -- MD-2
+
+ mda-md2 DIGEST-ALGORITHM ::= {
+ IDENTIFIER id-md2
+ PARAMS TYPE NULL ARE preferredAbsent
+ }
+
+ id-md2 OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) rsadsi(113549)
+ digestAlgorithm(2) 2 }
+
+ -- MD-5
+
+ mda-md5 DIGEST-ALGORITHM ::= {
+ IDENTIFIER id-md5
+ PARAMS TYPE NULL ARE preferredAbsent
+ }
+
+ id-md5 OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) rsadsi(113549)
+ digestAlgorithm(2) 5 }
+
+ -- SHA-1
+
+ mda-sha1 DIGEST-ALGORITHM ::= {
+ IDENTIFIER id-sha1
+ PARAMS TYPE NULL ARE preferredAbsent
+ }
+
+ id-sha1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) oiw(14) secsig(3)
+ algorithm(2) 26 }
+
+ END
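
The module above closes with the raw signature structures (DSA-Sig-Value, ECDSA-Sig-Value), the named-curve OIDs, and the digest-algorithm objects referenced by the SIGNATURE-ALGORITHM definitions. As a rough illustration of how such test data is driven from Erlang, the sketch below compiles the RFC modules as one set and round-trips an ECDSA-Sig-Value. It is only a sketch: the set-file name "Rfc5912.set.asn", the generated module name 'Rfc5912', and the r/s values are assumptions rather than part of this patch, and the encode/decode return shapes should be checked against the asn1 documentation for the OTP release in use.

    %% Assumed file and module names; BER back-end with the 'maps' option,
    %% so SEQUENCE values are maps keyed by component name.
    -module(pkix_algs_sketch).
    -export([roundtrip/0]).

    roundtrip() ->
        %% "Rfc5912.set.asn" is assumed to list PKIXAlgs-2009.asn1 together
        %% with the modules it IMPORTs from, so they compile as one unit.
        ok = asn1ct:compile("Rfc5912.set.asn", [ber, maps]),
        Sig = #{r => 12345, s => 67890},                 % dummy ECDSA r/s pair
        {ok, Der} = 'Rfc5912':encode('ECDSA-Sig-Value', Sig),
        {ok, Sig} = 'Rfc5912':decode('ECDSA-Sig-Value', Der),
        %% Named-curve OIDs such as secp256r1 are plain integer tuples:
        {Der, {1, 2, 840, 10045, 3, 1, 7}}.
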
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXAttributeCertificate-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXAttributeCertificate-2009.asn1
new file mode 100644
index 0000000000..3ab074643f
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXAttributeCertificate-2009.asn1
@@ -0,0 +1,292 @@
+ PKIXAttributeCertificate-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-attribute-cert-02(47)}
+ DEFINITIONS IMPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ AttributeSet{}, Extensions{}, SecurityCategory{},
+ EXTENSION, ATTRIBUTE, SECURITY-CATEGORY
+ FROM PKIX-CommonTypes-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57) }
+
+ AlgorithmIdentifier{}, SIGNATURE-ALGORITHM, DIGEST-ALGORITHM
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ -- IMPORTed module OIDs MAY change if [PKIXPROF] changes
+ -- PKIX Certificate Extensions
+
+ CertificateSerialNumber, UniqueIdentifier, id-pkix, id-pe, id-kp,
+ id-ad, id-at, SIGNED{}, SignatureAlgorithms
+ FROM PKIX1Explicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-explicit-02(51)}
+
+ GeneralName, GeneralNames, id-ce, ext-AuthorityKeyIdentifier,
+ ext-AuthorityInfoAccess, ext-CRLDistributionPoints
+ FROM PKIX1Implicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-implicit-02(59)}
+
+ ContentInfo
+ FROM CryptographicMessageSyntax-2009
+ { iso(1) member-body(2) us(840) rsadsi(113549)
+ pkcs(1) pkcs-9(9) smime(16) modules(0) id-mod-cms-2004-02(41) };
+ -- Define the set of extensions that can appear.
+ -- Some of these are imported from PKIX Cert
+
+ AttributeCertExtensions EXTENSION ::= {
+ ext-auditIdentity | ext-targetInformation |
+ ext-AuthorityKeyIdentifier | ext-AuthorityInfoAccess |
+ ext-CRLDistributionPoints | ext-noRevAvail | ext-ac-proxying |
+ ext-aaControls, ... }
+
+ ext-auditIdentity EXTENSION ::= { SYNTAX
+ OCTET STRING IDENTIFIED BY id-pe-ac-auditIdentity}
+
+ ext-targetInformation EXTENSION ::= { SYNTAX
+ Targets IDENTIFIED BY id-ce-targetInformation }
+
+ ext-noRevAvail EXTENSION ::= { SYNTAX
+ NULL IDENTIFIED BY id-ce-noRevAvail}
+
+ ext-ac-proxying EXTENSION ::= { SYNTAX
+ ProxyInfo IDENTIFIED BY id-pe-ac-proxying}
+
+ ext-aaControls EXTENSION ::= { SYNTAX
+ AAControls IDENTIFIED BY id-pe-aaControls}
+
+ -- Define the set of attributes used here
+
+ AttributesDefined ATTRIBUTE ::= { at-authenticationInfo |
+ at-accesIdentity | at-chargingIdentity | at-group |
+ at-role | at-clearance | at-encAttrs, ...}
+
+ at-authenticationInfo ATTRIBUTE ::= { TYPE SvceAuthInfo
+ IDENTIFIED BY id-aca-authenticationInfo}
+
+ at-accesIdentity ATTRIBUTE ::= { TYPE SvceAuthInfo
+ IDENTIFIED BY id-aca-accessIdentity}
+
+ at-chargingIdentity ATTRIBUTE ::= { TYPE IetfAttrSyntax
+ IDENTIFIED BY id-aca-chargingIdentity}
+
+ at-group ATTRIBUTE ::= { TYPE IetfAttrSyntax
+ IDENTIFIED BY id-aca-group}
+
+ at-role ATTRIBUTE ::= { TYPE RoleSyntax
+ IDENTIFIED BY id-at-role}
+
+ at-clearance ATTRIBUTE ::= { TYPE Clearance
+ IDENTIFIED BY id-at-clearance}
+ at-clearance-RFC3281 ATTRIBUTE ::= {TYPE Clearance-rfc3281
+ IDENTIFIED BY id-at-clearance-rfc3281 }
+
+ at-encAttrs ATTRIBUTE ::= { TYPE ContentInfo
+ IDENTIFIED BY id-aca-encAttrs}
+
+ --
+ -- OIDs used by Attribute Certificate Extensions
+ --
+
+ id-pe-ac-auditIdentity OBJECT IDENTIFIER ::= { id-pe 4 }
+ id-pe-aaControls OBJECT IDENTIFIER ::= { id-pe 6 }
+ id-pe-ac-proxying OBJECT IDENTIFIER ::= { id-pe 10 }
+ id-ce-targetInformation OBJECT IDENTIFIER ::= { id-ce 55 }
+ id-ce-noRevAvail OBJECT IDENTIFIER ::= { id-ce 56 }
+
+ --
+ -- OIDs used by Attribute Certificate Attributes
+ --
+
+ id-aca OBJECT IDENTIFIER ::= { id-pkix 10 }
+
+ id-aca-authenticationInfo OBJECT IDENTIFIER ::= { id-aca 1 }
+ id-aca-accessIdentity OBJECT IDENTIFIER ::= { id-aca 2 }
+ id-aca-chargingIdentity OBJECT IDENTIFIER ::= { id-aca 3 }
+ id-aca-group OBJECT IDENTIFIER ::= { id-aca 4 }
+ -- { id-aca 5 } is reserved
+ id-aca-encAttrs OBJECT IDENTIFIER ::= { id-aca 6 }
+
+ id-at-role OBJECT IDENTIFIER ::= { id-at 72}
+ id-at-clearance OBJECT IDENTIFIER ::= {
+ joint-iso-ccitt(2) ds(5) attributeType(4) clearance (55) }
+
+ -- Uncomment the following declaration and comment the above line if
+ -- using the id-at-clearance attribute as defined in [RFC3281]
+ -- id-at-clearance ::= id-at-clearance-rfc3281
+
+ id-at-clearance-rfc3281 OBJECT IDENTIFIER ::= {
+ joint-iso-ccitt(2) ds(5) module(1) selected-attribute-types(5)
+ clearance (55) }
+
+ --
+ -- The syntax of an Attribute Certificate
+ --
+
+ AttributeCertificate ::= SIGNED{AttributeCertificateInfo}
+
+ AttributeCertificateInfo ::= SEQUENCE {
+ version AttCertVersion, -- version is v2
+ holder Holder,
+ issuer AttCertIssuer,
+ signature AlgorithmIdentifier{SIGNATURE-ALGORITHM,
+ {SignatureAlgorithms}},
+ serialNumber CertificateSerialNumber,
+ attrCertValidityPeriod AttCertValidityPeriod,
+ attributes SEQUENCE OF
+ AttributeSet{{AttributesDefined}},
+ issuerUniqueID UniqueIdentifier OPTIONAL,
+ extensions Extensions{{AttributeCertExtensions}} OPTIONAL
+ }
+
+ AttCertVersion ::= INTEGER { v2(1) }
+
+ Holder ::= SEQUENCE {
+ baseCertificateID [0] IssuerSerial OPTIONAL,
+ -- the issuer and serial number of
+ -- the holder's Public Key Certificate
+ entityName [1] GeneralNames OPTIONAL,
+ -- the name of the claimant or role
+ objectDigestInfo [2] ObjectDigestInfo OPTIONAL
+ -- used to directly authenticate the
+ -- holder, for example, an executable
+ }
+
+ ObjectDigestInfo ::= SEQUENCE {
+ digestedObjectType ENUMERATED {
+ publicKey (0),
+ publicKeyCert (1),
+ otherObjectTypes (2) },
+ -- otherObjectTypes MUST NOT
+ -- be used in this profile
+ otherObjectTypeID OBJECT IDENTIFIER OPTIONAL,
+ digestAlgorithm AlgorithmIdentifier{DIGEST-ALGORITHM, {...}},
+ objectDigest BIT STRING
+ }
+
+ AttCertIssuer ::= CHOICE {
+ v1Form GeneralNames, -- MUST NOT be used in this
+ -- profile
+ v2Form [0] V2Form -- v2 only
+ }
+
+ V2Form ::= SEQUENCE {
+ issuerName GeneralNames OPTIONAL,
+ baseCertificateID [0] IssuerSerial OPTIONAL,
+ objectDigestInfo [1] ObjectDigestInfo OPTIONAL
+ -- issuerName MUST be present in this profile
+ -- baseCertificateID and objectDigestInfo MUST
+ -- NOT be present in this profile
+ }
+
+ IssuerSerial ::= SEQUENCE {
+ issuer GeneralNames,
+ serial CertificateSerialNumber,
+ issuerUID UniqueIdentifier OPTIONAL
+ }
+
+ AttCertValidityPeriod ::= SEQUENCE {
+ notBeforeTime GeneralizedTime,
+ notAfterTime GeneralizedTime
+ }
+
+ --
+ -- Syntax used by Attribute Certificate Extensions
+ --
+
+ Targets ::= SEQUENCE OF Target
+
+ Target ::= CHOICE {
+ targetName [0] GeneralName,
+ targetGroup [1] GeneralName,
+ targetCert [2] TargetCert
+ }
+
+ TargetCert ::= SEQUENCE {
+ targetCertificate IssuerSerial,
+ targetName GeneralName OPTIONAL,
+ certDigestInfo ObjectDigestInfo OPTIONAL
+ }
+
+ AAControls ::= SEQUENCE {
+ pathLenConstraint INTEGER (0..MAX) OPTIONAL,
+ permittedAttrs [0] AttrSpec OPTIONAL,
+ excludedAttrs [1] AttrSpec OPTIONAL,
+ permitUnSpecified BOOLEAN DEFAULT TRUE
+ }
+
+ AttrSpec::= SEQUENCE OF OBJECT IDENTIFIER
+
+ ProxyInfo ::= SEQUENCE OF Targets
+
+ --
+ -- Syntax used by Attribute Certificate Attributes
+ --
+ IetfAttrSyntax ::= SEQUENCE {
+ policyAuthority[0] GeneralNames OPTIONAL,
+ values SEQUENCE OF CHOICE {
+ octets OCTET STRING,
+ oid OBJECT IDENTIFIER,
+ string UTF8String
+ }
+ }
+
+ SvceAuthInfo ::= SEQUENCE {
+ service GeneralName,
+ ident GeneralName,
+ authInfo OCTET STRING OPTIONAL
+ }
+
+ RoleSyntax ::= SEQUENCE {
+ roleAuthority [0] GeneralNames OPTIONAL,
+ roleName [1] GeneralName
+ }
+
+ Clearance ::= SEQUENCE {
+ policyId OBJECT IDENTIFIER,
+ classList ClassList DEFAULT {unclassified},
+ securityCategories SET OF SecurityCategory
+ {{SupportedSecurityCategories}} OPTIONAL
+ }
+
+ -- Uncomment the following lines to support deprecated clearance
+ -- syntax and comment out previous Clearance.
+
+ -- Clearance ::= Clearance-rfc3281
+
+ Clearance-rfc3281 ::= SEQUENCE {
+ policyId [0] OBJECT IDENTIFIER,
+ classList [1] ClassList DEFAULT {unclassified},
+ securityCategories [2] SET OF SecurityCategory-rfc3281
+ {{SupportedSecurityCategories}} OPTIONAL
+ }
+
+ ClassList ::= BIT STRING {
+ unmarked (0),
+ unclassified (1),
+ restricted (2),
+ confidential (3),
+ secret (4),
+ topSecret (5)
+ }
+ SupportedSecurityCategories SECURITY-CATEGORY ::= { ... }
+
+ SecurityCategory-rfc3281{SECURITY-CATEGORY:Supported} ::= SEQUENCE {
+ type [0] IMPLICIT SECURITY-CATEGORY.
+ &id({Supported}),
+ value [1] EXPLICIT SECURITY-CATEGORY.
+ &Type({Supported}{@type})
+ }
+
+ ACClearAttrs ::= SEQUENCE {
+ acIssuer GeneralName,
+ acSerial INTEGER,
+ attrs SEQUENCE OF AttributeSet{{AttributesDefined}}
+ }
+
+ END
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXCMP-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXCMP-2009.asn1
new file mode 100644
index 0000000000..968a142f28
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXCMP-2009.asn1
@@ -0,0 +1,495 @@
+ PKIXCMP-2009
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-cmp2000-02(50) }
+ DEFINITIONS EXPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ AttributeSet{}, Extensions{}, EXTENSION, ATTRIBUTE
+ FROM PKIX-CommonTypes-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57)}
+
+ AlgorithmIdentifier{}, SIGNATURE-ALGORITHM, ALGORITHM,
+ DIGEST-ALGORITHM, MAC-ALGORITHM
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ Certificate, CertificateList
+ FROM PKIX1Explicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-explicit-02(51)}
+
+ GeneralName, KeyIdentifier
+ FROM PKIX1Implicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-implicit-02(59)}
+
+ CertTemplate, PKIPublicationInfo, EncryptedValue, CertId,
+ CertReqMessages
+ FROM PKIXCRMF-2009
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-crmf2005-02(55) }
+ -- see also the behavioral clarifications to CRMF codified in
+ -- Appendix C of this specification
+
+ CertificationRequest
+ FROM PKCS-10
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkcs10-2009(69)}
+ -- (specified in RFC 2986 with 1993 ASN.1 syntax and IMPLICIT
+ -- tags). Alternatively, implementers may directly include
+ -- the [PKCS10] syntax in this module
+ ;
+
+ -- the rest of the module contains locally defined OIDs and
+ -- constructs
+
+ CMPCertificate ::= CHOICE { x509v3PKCert Certificate, ... }
+ -- This syntax, while bits-on-the-wire compatible with the
+ -- standard X.509 definition of "Certificate", allows the
+ -- possibility of future certificate types (such as X.509
+ -- attribute certificates, WAP WTLS certificates, or other kinds
+ -- of certificates) within this certificate management protocol,
+ -- should a need ever arise to support such generality. Those
+ -- implementations that do not foresee a need to ever support
+ -- other certificate types MAY, if they wish, comment out the
+ -- above structure and "uncomment" the following one prior to
+ -- compiling this ASN.1 module. (Note that interoperability
+ -- with implementations that don't do this will be unaffected by
+ -- this change.)
+
+ -- CMPCertificate ::= Certificate
+
+ PKIMessage ::= SEQUENCE {
+ header PKIHeader,
+ body PKIBody,
+ protection [0] PKIProtection OPTIONAL,
+ extraCerts [1] SEQUENCE SIZE (1..MAX) OF CMPCertificate
+ OPTIONAL }
+
+ PKIMessages ::= SEQUENCE SIZE (1..MAX) OF PKIMessage
+
+ PKIHeader ::= SEQUENCE {
+ pvno INTEGER { cmp1999(1), cmp2000(2) },
+ sender GeneralName,
+ -- identifies the sender
+ recipient GeneralName,
+ -- identifies the intended recipient
+ messageTime [0] GeneralizedTime OPTIONAL,
+ -- time of production of this message (used when sender
+ -- believes that the transport will be "suitable"; i.e.,
+ -- that the time will still be meaningful upon receipt)
+ protectionAlg [1] AlgorithmIdentifier{ALGORITHM, {...}}
+ OPTIONAL,
+ -- algorithm used for calculation of protection bits
+ senderKID [2] KeyIdentifier OPTIONAL,
+ recipKID [3] KeyIdentifier OPTIONAL,
+ -- to identify specific keys used for protection
+ transactionID [4] OCTET STRING OPTIONAL,
+ -- identifies the transaction; i.e., this will be the same in
+ -- corresponding request, response, certConf, and PKIConf
+ -- messages
+ senderNonce [5] OCTET STRING OPTIONAL,
+ recipNonce [6] OCTET STRING OPTIONAL,
+ -- nonces used to provide replay protection, senderNonce
+ -- is inserted by the creator of this message; recipNonce
+ -- is a nonce previously inserted in a related message by
+ -- the intended recipient of this message
+ freeText [7] PKIFreeText OPTIONAL,
+ -- this may be used to indicate context-specific instructions
+ -- (this field is intended for human consumption)
+ generalInfo [8] SEQUENCE SIZE (1..MAX) OF
+ InfoTypeAndValue OPTIONAL
+ -- this may be used to convey context-specific information
+ -- (this field not primarily intended for human consumption)
+ }
+
+ PKIFreeText ::= SEQUENCE SIZE (1..MAX) OF UTF8String
+ -- text encoded as UTF-8 String [RFC3629] (note: each
+ -- UTF8String MAY include an [RFC3066] language tag
+ -- to indicate the language of the contained text;
+ -- see [RFC2482] for details)
+
+ PKIBody ::= CHOICE { -- message-specific body elements
+ ir [0] CertReqMessages, --Initialization Request
+ ip [1] CertRepMessage, --Initialization Response
+ cr [2] CertReqMessages, --Certification Request
+ cp [3] CertRepMessage, --Certification Response
+ p10cr [4] CertificationRequest, --imported from [PKCS10]
+ popdecc [5] POPODecKeyChallContent, --pop Challenge
+ popdecr [6] POPODecKeyRespContent, --pop Response
+ kur [7] CertReqMessages, --Key Update Request
+ kup [8] CertRepMessage, --Key Update Response
+ krr [9] CertReqMessages, --Key Recovery Request
+ krp [10] KeyRecRepContent, --Key Recovery Response
+ rr [11] RevReqContent, --Revocation Request
+ rp [12] RevRepContent, --Revocation Response
+ ccr [13] CertReqMessages, --Cross-Cert. Request
+ ccp [14] CertRepMessage, --Cross-Cert. Response
+ ckuann [15] CAKeyUpdAnnContent, --CA Key Update Ann.
+ cann [16] CertAnnContent, --Certificate Ann.
+ rann [17] RevAnnContent, --Revocation Ann.
+ crlann [18] CRLAnnContent, --CRL Announcement
+ pkiconf [19] PKIConfirmContent, --Confirmation
+ nested [20] NestedMessageContent, --Nested Message
+ genm [21] GenMsgContent, --General Message
+ genp [22] GenRepContent, --General Response
+ error [23] ErrorMsgContent, --Error Message
+ certConf [24] CertConfirmContent, --Certificate confirm
+ pollReq [25] PollReqContent, --Polling request
+ pollRep [26] PollRepContent --Polling response
+ }
+
+ PKIProtection ::= BIT STRING
+
+ ProtectedPart ::= SEQUENCE {
+ header PKIHeader,
+ body PKIBody }
+
+ id-PasswordBasedMac OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ usa(840) nt(113533) nsn(7) algorithms(66) 13 }
+ PBMParameter ::= SEQUENCE {
+ salt OCTET STRING,
+ -- note: implementations MAY wish to limit acceptable sizes
+ -- of this string to values appropriate for their environment
+ -- in order to reduce the risk of denial-of-service attacks
+ owf AlgorithmIdentifier{DIGEST-ALGORITHM, {...}},
+ -- AlgId for a One-Way Function (SHA-1 recommended)
+ iterationCount INTEGER,
+ -- number of times the OWF is applied
+ -- note: implementations MAY wish to limit acceptable sizes
+ -- of this integer to values appropriate for their environment
+ -- in order to reduce the risk of denial-of-service attacks
+ mac AlgorithmIdentifier{MAC-ALGORITHM, {...}}
+ -- the MAC AlgId (e.g., DES-MAC, Triple-DES-MAC [PKCS11],
+ -- or HMAC [RFC2104, RFC2202])
+ }
+
+ id-DHBasedMac OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ usa(840) nt(113533) nsn(7) algorithms(66) 30 }
+ DHBMParameter ::= SEQUENCE {
+ owf AlgorithmIdentifier{DIGEST-ALGORITHM, {...}},
+ -- AlgId for a One-Way Function (SHA-1 recommended)
+ mac AlgorithmIdentifier{MAC-ALGORITHM, {...}}
+ -- the MAC AlgId (e.g., DES-MAC, Triple-DES-MAC [PKCS11],
+ -- or HMAC [RFC2104, RFC2202])
+ }
+
+ PKIStatus ::= INTEGER {
+ accepted (0),
+ -- you got exactly what you asked for
+ grantedWithMods (1),
+ -- you got something like what you asked for; the
+ -- requester is responsible for ascertaining the differences
+ rejection (2),
+ -- you don't get it, more information elsewhere in the message
+ waiting (3),
+ -- the request body part has not yet been processed; expect to
+ -- hear more later (note: proper handling of this status
+ -- response MAY use the polling req/rep PKIMessages specified
+ -- in Section 5.3.22; alternatively, polling in the underlying
+ -- transport layer MAY have some utility in this regard)
+ revocationWarning (4),
+ -- this message contains a warning that a revocation is
+ -- imminent
+ revocationNotification (5),
+ -- notification that a revocation has occurred
+ keyUpdateWarning (6)
+ -- update already done for the oldCertId specified in
+ -- CertReqMsg
+ }
+
+ PKIFailureInfo ::= BIT STRING {
+ -- since we can fail in more than one way!
+ -- More codes may be added in the future if/when required.
+ badAlg (0),
+ -- unrecognized or unsupported Algorithm Identifier
+ badMessageCheck (1),
+ -- integrity check failed (e.g., signature did not verify)
+ badRequest (2),
+ -- transaction not permitted or supported
+ badTime (3),
+ -- messageTime was not sufficiently close to the system time,
+ -- as defined by local policy
+ badCertId (4),
+ -- no certificate could be found matching the provided criteria
+ badDataFormat (5),
+ -- the data submitted has the wrong format
+ wrongAuthority (6),
+ -- the authority indicated in the request is different from the
+ -- one creating the response token
+ incorrectData (7),
+ -- the requester's data is incorrect (for notary services)
+ missingTimeStamp (8),
+ -- when the timestamp is missing but should be there
+ -- (by policy)
+ badPOP (9),
+ -- the proof-of-possession failed
+ certRevoked (10),
+ -- the certificate has already been revoked
+ certConfirmed (11),
+ -- the certificate has already been confirmed
+ wrongIntegrity (12),
+ -- invalid integrity, password based instead of signature or
+ -- vice versa
+ badRecipientNonce (13),
+ -- invalid recipient nonce, either missing or wrong value
+ timeNotAvailable (14),
+ -- the TSA's time source is not available
+ unacceptedPolicy (15),
+ -- the requested TSA policy is not supported by the TSA
+ unacceptedExtension (16),
+ -- the requested extension is not supported by the TSA
+ addInfoNotAvailable (17),
+ -- the additional information requested could not be
+ -- understood or is not available
+ badSenderNonce (18),
+ -- invalid sender nonce, either missing or wrong size
+ badCertTemplate (19),
+ -- invalid cert. template or missing mandatory information
+ signerNotTrusted (20),
+ -- signer of the message unknown or not trusted
+ transactionIdInUse (21),
+ -- the transaction identifier is already in use
+ unsupportedVersion (22),
+ -- the version of the message is not supported
+ notAuthorized (23),
+ -- the sender was not authorized to make the preceding
+ -- request or perform the preceding action
+ systemUnavail (24),
+ -- the request cannot be handled due to system unavailability
+ systemFailure (25),
+ -- the request cannot be handled due to system failure
+ duplicateCertReq (26)
+ -- certificate cannot be issued because a duplicate
+ -- certificate already exists
+ }
+
+ PKIStatusInfo ::= SEQUENCE {
+ status PKIStatus,
+ statusString PKIFreeText OPTIONAL,
+ failInfo PKIFailureInfo OPTIONAL }
+
+ OOBCert ::= CMPCertificate
+
+ OOBCertHash ::= SEQUENCE {
+ hashAlg [0] AlgorithmIdentifier{DIGEST-ALGORITHM, {...}}
+ OPTIONAL,
+ certId [1] CertId OPTIONAL,
+ hashVal BIT STRING
+ -- hashVal is calculated over the DER encoding of the
+ -- self-signed certificate with the identifier certID.
+ }
+
+ POPODecKeyChallContent ::= SEQUENCE OF Challenge
+ -- One Challenge per encryption key certification request (in the
+ -- same order as these requests appear in CertReqMessages).
+
+ Challenge ::= SEQUENCE {
+ owf AlgorithmIdentifier{DIGEST-ALGORITHM, {...}}
+ OPTIONAL,
+ -- MUST be present in the first Challenge; MAY be omitted in
+ -- any subsequent Challenge in POPODecKeyChallContent (if
+ -- omitted, then the owf used in the immediately preceding
+ -- Challenge is to be used).
+ witness OCTET STRING,
+ -- the result of applying the one-way function (owf) to a
+ -- randomly-generated INTEGER, A. [Note that a different
+ -- INTEGER MUST be used for each Challenge.]
+ challenge OCTET STRING
+ -- the encryption (under the public key for which the cert.
+ -- request is being made) of Rand, where Rand is specified as
+ -- Rand ::= SEQUENCE {
+ -- int INTEGER,
+ -- - the randomly-generated INTEGER A (above)
+ -- sender GeneralName
+ -- - the sender's name (as included in PKIHeader)
+ -- }
+ }
+
+ POPODecKeyRespContent ::= SEQUENCE OF INTEGER
+ -- One INTEGER per encryption key certification request (in the
+ -- same order as these requests appear in CertReqMessages). The
+ -- retrieved INTEGER A (above) is returned to the sender of the
+ -- corresponding Challenge.
+
+ CertRepMessage ::= SEQUENCE {
+ caPubs [1] SEQUENCE SIZE (1..MAX) OF CMPCertificate
+ OPTIONAL,
+ response SEQUENCE OF CertResponse }
+
+ CertResponse ::= SEQUENCE {
+ certReqId INTEGER,
+ -- to match this response with the corresponding request (a value
+ -- of -1 is to be used if certReqId is not specified in the
+ -- corresponding request)
+ status PKIStatusInfo,
+ certifiedKeyPair CertifiedKeyPair OPTIONAL,
+ rspInfo OCTET STRING OPTIONAL
+ -- analogous to the id-regInfo-utf8Pairs string defined
+ -- for regInfo in CertReqMsg [RFC4211]
+ }
+
+ CertifiedKeyPair ::= SEQUENCE {
+ certOrEncCert CertOrEncCert,
+ privateKey [0] EncryptedValue OPTIONAL,
+ -- see [RFC4211] for comment on encoding
+ publicationInfo [1] PKIPublicationInfo OPTIONAL }
+
+ CertOrEncCert ::= CHOICE {
+ certificate [0] CMPCertificate,
+ encryptedCert [1] EncryptedValue }
+ KeyRecRepContent ::= SEQUENCE {
+ status PKIStatusInfo,
+ newSigCert [0] CMPCertificate OPTIONAL,
+ caCerts [1] SEQUENCE SIZE (1..MAX) OF
+ CMPCertificate OPTIONAL,
+ keyPairHist [2] SEQUENCE SIZE (1..MAX) OF
+ CertifiedKeyPair OPTIONAL }
+
+ RevReqContent ::= SEQUENCE OF RevDetails
+
+ RevDetails ::= SEQUENCE {
+ certDetails CertTemplate,
+ -- allows requester to specify as much as they can about
+ -- the cert. for which revocation is requested
+ -- (e.g., for cases in which serialNumber is not available)
+ crlEntryDetails Extensions{{...}} OPTIONAL
+ -- requested crlEntryExtensions
+ }
+
+ RevRepContent ::= SEQUENCE {
+ status SEQUENCE SIZE (1..MAX) OF PKIStatusInfo,
+ -- in same order as was sent in RevReqContent
+ revCerts [0] SEQUENCE SIZE (1..MAX) OF CertId OPTIONAL,
+ -- IDs for which revocation was requested
+ -- (same order as status)
+ crls [1] SEQUENCE SIZE (1..MAX) OF CertificateList OPTIONAL
+ -- the resulting CRLs (there may be more than one)
+ }
+
+ CAKeyUpdAnnContent ::= SEQUENCE {
+ oldWithNew CMPCertificate, -- old pub signed with new priv
+ newWithOld CMPCertificate, -- new pub signed with old priv
+ newWithNew CMPCertificate -- new pub signed with new priv
+ }
+
+ CertAnnContent ::= CMPCertificate
+
+ RevAnnContent ::= SEQUENCE {
+ status PKIStatus,
+ certId CertId,
+ willBeRevokedAt GeneralizedTime,
+ badSinceDate GeneralizedTime,
+ crlDetails Extensions{{...}} OPTIONAL
+ -- extra CRL details (e.g., crl number, reason, location, etc.)
+ }
+
+ CRLAnnContent ::= SEQUENCE OF CertificateList
+ PKIConfirmContent ::= NULL
+
+ NestedMessageContent ::= PKIMessages
+
+ INFO-TYPE-AND-VALUE ::= TYPE-IDENTIFIER
+
+ InfoTypeAndValue ::= SEQUENCE {
+ infoType INFO-TYPE-AND-VALUE.
+ &id({SupportedInfoSet}),
+ infoValue INFO-TYPE-AND-VALUE.
+ &Type({SupportedInfoSet}{@infoType}) }
+
+ SupportedInfoSet INFO-TYPE-AND-VALUE ::= { ... }
+
+ -- Example InfoTypeAndValue contents include, but are not limited
+ -- to, the following (uncomment in this ASN.1 module and use as
+ -- appropriate for a given environment):
+ --
+ -- id-it-caProtEncCert OBJECT IDENTIFIER ::= {id-it 1}
+ -- CAProtEncCertValue ::= CMPCertificate
+ -- id-it-signKeyPairTypes OBJECT IDENTIFIER ::= {id-it 2}
+ -- SignKeyPairTypesValue ::= SEQUENCE OF
+ -- AlgorithmIdentifier{{...}}
+ -- id-it-encKeyPairTypes OBJECT IDENTIFIER ::= {id-it 3}
+ -- EncKeyPairTypesValue ::= SEQUENCE OF
+ -- AlgorithmIdentifier{{...}}
+ -- id-it-preferredSymmAlg OBJECT IDENTIFIER ::= {id-it 4}
+ -- PreferredSymmAlgValue ::= AlgorithmIdentifier{{...}}
+ -- id-it-caKeyUpdateInfo OBJECT IDENTIFIER ::= {id-it 5}
+ -- CAKeyUpdateInfoValue ::= CAKeyUpdAnnContent
+ -- id-it-currentCRL OBJECT IDENTIFIER ::= {id-it 6}
+ -- CurrentCRLValue ::= CertificateList
+ -- id-it-unsupportedOIDs OBJECT IDENTIFIER ::= {id-it 7}
+ -- UnsupportedOIDsValue ::= SEQUENCE OF OBJECT IDENTIFIER
+ -- id-it-keyPairParamReq OBJECT IDENTIFIER ::= {id-it 10}
+ -- KeyPairParamReqValue ::= OBJECT IDENTIFIER
+ -- id-it-keyPairParamRep OBJECT IDENTIFIER ::= {id-it 11}
+ -- KeyPairParamRepValue ::= AlgorithmIdentifier
+ -- id-it-revPassphrase OBJECT IDENTIFIER ::= {id-it 12}
+ -- RevPassphraseValue ::= EncryptedValue
+ -- id-it-implicitConfirm OBJECT IDENTIFIER ::= {id-it 13}
+ -- ImplicitConfirmValue ::= NULL
+ -- id-it-confirmWaitTime OBJECT IDENTIFIER ::= {id-it 14}
+ -- ConfirmWaitTimeValue ::= GeneralizedTime
+ -- id-it-origPKIMessage OBJECT IDENTIFIER ::= {id-it 15}
+ -- OrigPKIMessageValue ::= PKIMessages
+ -- id-it-suppLangTags OBJECT IDENTIFIER ::= {id-it 16}
+ -- SuppLangTagsValue ::= SEQUENCE OF UTF8String
+ --
+ -- where
+ --
+ -- id-pkix OBJECT IDENTIFIER ::= {
+ -- iso(1) identified-organization(3)
+ -- dod(6) internet(1) security(5) mechanisms(5) pkix(7)}
+ -- and
+ -- id-it OBJECT IDENTIFIER ::= {id-pkix 4}
+ --
+ --
+ -- This construct MAY also be used to define new PKIX Certificate
+ -- Management Protocol request and response messages, or general-
+ -- purpose (e.g., announcement) messages for future needs or for
+ -- specific environments.
+
+ GenMsgContent ::= SEQUENCE OF InfoTypeAndValue
+
+ -- May be sent by EE, RA, or CA (depending on message content).
+ -- The OPTIONAL infoValue parameter of InfoTypeAndValue will
+ -- typically be omitted for some of the examples given above.
+ -- The receiver is free to ignore any contained OBJECT IDs that it
+ -- does not recognize. If sent from EE to CA, the empty set
+ -- indicates that the CA may send
+ -- any/all information that it wishes.
+
+ GenRepContent ::= SEQUENCE OF InfoTypeAndValue
+ -- Receiver MAY ignore any contained OIDs that it does not
+ -- recognize.
+
+ ErrorMsgContent ::= SEQUENCE {
+ pKIStatusInfo PKIStatusInfo,
+ errorCode INTEGER OPTIONAL,
+ -- implementation-specific error codes
+ errorDetails PKIFreeText OPTIONAL
+ -- implementation-specific error details
+ }
+
+ CertConfirmContent ::= SEQUENCE OF CertStatus
+
+ CertStatus ::= SEQUENCE {
+ certHash OCTET STRING,
+ -- the hash of the certificate, using the same hash algorithm
+ -- as is used to create and verify the certificate signature
+ certReqId INTEGER,
+ -- to match this confirmation with the corresponding req/rep
+ statusInfo PKIStatusInfo OPTIONAL }
+
+ PollReqContent ::= SEQUENCE OF SEQUENCE {
+ certReqId INTEGER }
+
+ PollRepContent ::= SEQUENCE OF SEQUENCE {
+ certReqId INTEGER,
+ checkAfter INTEGER, -- time in seconds
+ reason PKIFreeText OPTIONAL }
+
+ END
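
The comments on PBMParameter above summarize the password-based MAC used for PKIProtection (RFC 4210, Section 5.1.3.1): the salt is appended to the shared secret, the one-way function is applied iterationCount times, and the result keys a MAC over the DER-encoded ProtectedPart. The sketch below restates that computation, assuming SHA-1 as the OWF and HMAC-SHA-1 as the MAC purely for concreteness; the function and parameter names are hypothetical.

    -module(cmp_pbm_sketch).
    -export([protection/4]).

    %% Returns the PKIProtection bits for a MAC-protected PKIMessage.
    %% SharedSecret and Salt are binaries; ProtectedPartDer is the DER
    %% encoding of ProtectedPart (header plus body).
    protection(SharedSecret, Salt, IterationCount, ProtectedPartDer) ->
        BaseKey = owf(<<SharedSecret/binary, Salt/binary>>, IterationCount),
        crypto:mac(hmac, sha, BaseKey, ProtectedPartDer).

    %% Apply the one-way function (here SHA-1) N times.
    owf(Data, 0) -> Data;
    owf(Data, N) -> owf(crypto:hash(sha, Data), N - 1).
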
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXCRMF-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXCRMF-2009.asn1
new file mode 100644
index 0000000000..1c0b780499
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXCRMF-2009.asn1
@@ -0,0 +1,409 @@
+ PKIXCRMF-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-crmf2005-02(55)}
+ DEFINITIONS IMPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ AttributeSet{}, Extensions{}, EXTENSION, ATTRIBUTE,
+ SingleAttribute{}
+ FROM PKIX-CommonTypes-2009
+ {iso(1) identified-organization(3) dod(6) internet(1)
+ security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkixCommon-02(57) }
+
+ AlgorithmIdentifier{}, SIGNATURE-ALGORITHM, ALGORITHM,
+ DIGEST-ALGORITHM, MAC-ALGORITHM, PUBLIC-KEY
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ Version, Name, Time, SubjectPublicKeyInfo, UniqueIdentifier, id-pkix,
+ SignatureAlgorithms
+ FROM PKIX1Explicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-explicit-02(51)}
+
+ GeneralName, CertExtensions
+ FROM PKIX1Implicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-implicit-02(59)}
+
+ EnvelopedData, CONTENT-TYPE
+ FROM CryptographicMessageSyntax-2009
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-cms-2004-02(41)}
+ maca-hMAC-SHA1
+ FROM CryptographicMessageSyntaxAlgorithms-2009
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-cmsalg-2001-02(37) }
+
+ mda-sha1
+ FROM PKIXAlgs-2009
+ { iso(1) identified-organization(3) dod(6)
+ internet(1) security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-algorithms2008-02(56) } ;
+
+ -- arc for Internet X.509 PKI protocols and their components
+
+ id-pkip OBJECT IDENTIFIER ::= { id-pkix 5 }
+
+ id-smime OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs9(9) 16 }
+
+ id-ct OBJECT IDENTIFIER ::= { id-smime 1 } -- content types
+
+ -- Core definitions for this module
+
+ CertReqMessages ::= SEQUENCE SIZE (1..MAX) OF CertReqMsg
+
+ CertReqMsg ::= SEQUENCE {
+ certReq CertRequest,
+ popo ProofOfPossession OPTIONAL,
+ -- content depends upon key type
+ regInfo SEQUENCE SIZE(1..MAX) OF
+ SingleAttribute{{RegInfoSet}} OPTIONAL }
+
+ CertRequest ::= SEQUENCE {
+ certReqId INTEGER,
+ -- ID for matching request and reply
+ certTemplate CertTemplate,
+ -- Selected fields of cert to be issued
+ controls Controls OPTIONAL }
+ -- Attributes affecting issuance
+
+ CertTemplate ::= SEQUENCE {
+ version [0] Version OPTIONAL,
+ serialNumber [1] INTEGER OPTIONAL,
+ signingAlg [2] AlgorithmIdentifier{SIGNATURE-ALGORITHM,
+ {SignatureAlgorithms}} OPTIONAL,
+ issuer [3] Name OPTIONAL,
+ validity [4] OptionalValidity OPTIONAL,
+ subject [5] Name OPTIONAL,
+ publicKey [6] SubjectPublicKeyInfo OPTIONAL,
+ issuerUID [7] UniqueIdentifier OPTIONAL,
+ subjectUID [8] UniqueIdentifier OPTIONAL,
+ extensions [9] Extensions{{CertExtensions}} OPTIONAL }
+
+ OptionalValidity ::= SEQUENCE {
+ notBefore [0] Time OPTIONAL,
+ notAfter [1] Time OPTIONAL } -- at least one MUST be present
+
+ Controls ::= SEQUENCE SIZE(1..MAX) OF SingleAttribute
+ {{RegControlSet}}
+
+ ProofOfPossession ::= CHOICE {
+ raVerified [0] NULL,
+ -- used if the RA has already verified that the requester is in
+ -- possession of the private key
+ signature [1] POPOSigningKey,
+ keyEncipherment [2] POPOPrivKey,
+ keyAgreement [3] POPOPrivKey }
+
+ POPOSigningKey ::= SEQUENCE {
+ poposkInput [0] POPOSigningKeyInput OPTIONAL,
+ algorithmIdentifier AlgorithmIdentifier{SIGNATURE-ALGORITHM,
+ {SignatureAlgorithms}},
+ signature BIT STRING }
+ -- The signature (using "algorithmIdentifier") is on the
+ -- DER-encoded value of poposkInput. NOTE: If the CertReqMsg
+ -- certReq CertTemplate contains the subject and publicKey values,
+ -- then poposkInput MUST be omitted and the signature MUST be
+ -- computed over the DER-encoded value of CertReqMsg certReq. If
+ -- the CertReqMsg certReq CertTemplate does not contain both the
+ -- public key and subject values (i.e., if it contains only one
+ -- of these, or neither), then poposkInput MUST be present and
+ -- MUST be signed.
+
+ POPOSigningKeyInput ::= SEQUENCE {
+ authInfo CHOICE {
+ sender [0] GeneralName,
+ -- used only if an authenticated identity has been
+ -- established for the sender (e.g., a DN from a
+ -- previously-issued and currently-valid certificate)
+ publicKeyMAC PKMACValue },
+ -- used if no authenticated GeneralName currently exists for
+ -- the sender; publicKeyMAC contains a password-based MAC
+ -- on the DER-encoded value of publicKey
+ publicKey SubjectPublicKeyInfo } -- from CertTemplate
+
+ PKMACValue ::= SEQUENCE {
+ algId AlgorithmIdentifier{MAC-ALGORITHM,
+ {Password-MACAlgorithms}},
+ value BIT STRING }
+
+ --
+ -- Define the currently only acceptable MAC algorithm to be used
+ -- for the PKMACValue structure
+ --
+
+ id-PasswordBasedMac OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ usa(840) nt(113533) nsn(7) algorithms(66) 13 }
+
+ Password-MACAlgorithms MAC-ALGORITHM ::= {
+ {IDENTIFIER id-PasswordBasedMac
+ PARAMS TYPE PBMParameter ARE required
+ IS-KEYED-MAC TRUE
+ }, ...
+ }
+
+ PBMParameter ::= SEQUENCE {
+ salt OCTET STRING,
+ owf AlgorithmIdentifier{DIGEST-ALGORITHM,
+ {DigestAlgorithms}},
+ -- AlgId for a One-Way Function (SHA-1 recommended)
+ iterationCount INTEGER,
+ -- number of times the OWF is applied
+ mac AlgorithmIdentifier{MAC-ALGORITHM,
+ {MACAlgorithms}}
+ -- the MAC AlgId (e.g., DES-MAC, Triple-DES-MAC, or HMAC)
+ }
+
+ DigestAlgorithms DIGEST-ALGORITHM ::= {
+ mda-sha1, ...
+ }
+
+ MACAlgorithms MAC-ALGORITHM ::= {
+ -- The modules containing the ASN.1 for the DES and 3DES MAC
+ -- algorithms have not been updated at the time that this is
+ -- being published. Users of this module should define the
+ -- appropriate MAC-ALGORITHM objects and uncomment the
+ -- following lines if they support these MAC algorithms.
+ -- maca-des-mac | maca-3des-mac --
+ maca-hMAC-SHA1,
+ ...
+ }
+
+ POPOPrivKey ::= CHOICE {
+ thisMessage [0] BIT STRING, -- Deprecated
+ -- possession is proven in this message (which contains
+ -- the private key itself (encrypted for the CA))
+ subsequentMessage [1] SubsequentMessage,
+ -- possession will be proven in a subsequent message
+ dhMAC [2] BIT STRING, -- Deprecated
+ agreeMAC [3] PKMACValue,
+ encryptedKey [4] EnvelopedData }
+ -- for keyAgreement (only), possession is proven in this message
+ -- (which contains a MAC (over the DER-encoded value of the
+ -- certReq parameter in CertReqMsg, which MUST include both
+ -- subject and publicKey) based on a key derived from the end
+ -- entity's private DH key and the CA's public DH key);
+
+ SubsequentMessage ::= INTEGER {
+ encrCert (0),
+ -- requests that resulting certificate be encrypted for the
+ -- end entity (following which, POP will be proven in a
+ -- confirmation message)
+ challengeResp (1) }
+ -- requests that CA engage in challenge-response exchange with
+ -- end entity in order to prove private key possession
+
+ --
+ -- id-ct-encKeyWithID content type used as the content type for the
+ -- EnvelopedData in POPOPrivKey.
+ -- It contains both a private key and an identifier for key escrow
+ -- agents to check against recovery requestors.
+ --
+
+ ct-encKeyWithID CONTENT-TYPE ::=
+ { EncKeyWithID IDENTIFIED BY id-ct-encKeyWithID }
+
+ id-ct-encKeyWithID OBJECT IDENTIFIER ::= {id-ct 21}
+
+ EncKeyWithID ::= SEQUENCE {
+ privateKey PrivateKeyInfo,
+ identifier CHOICE {
+ string UTF8String,
+ generalName GeneralName
+ } OPTIONAL
+ }
+
+ PrivateKeyInfo ::= SEQUENCE {
+ version INTEGER,
+ privateKeyAlgorithm AlgorithmIdentifier{PUBLIC-KEY, {...}},
+ privateKey OCTET STRING,
+ -- Structure of the private key is in PUBLIC-KEY.&PrivateKey
+ attributes [0] IMPLICIT Attributes OPTIONAL
+ }
+
+ Attributes ::= SET OF AttributeSet{{PrivateKeyAttributes}}
+ PrivateKeyAttributes ATTRIBUTE ::= {...}
+
+ --
+ -- 6. Registration Controls in CRMF
+ --
+
+ id-regCtrl OBJECT IDENTIFIER ::= { id-pkip 1 }
+
+ RegControlSet ATTRIBUTE ::= {
+ regCtrl-regToken | regCtrl-authenticator |
+ regCtrl-pkiPublicationInfo | regCtrl-pkiArchiveOptions |
+ regCtrl-oldCertID | regCtrl-protocolEncrKey, ... }
+
+ --
+ -- 6.1. Registration Token Control
+ --
+
+ regCtrl-regToken ATTRIBUTE ::=
+ { TYPE RegToken IDENTIFIED BY id-regCtrl-regToken }
+
+ id-regCtrl-regToken OBJECT IDENTIFIER ::= { id-regCtrl 1 }
+
+ RegToken ::= UTF8String
+
+ --
+ -- 6.2. Authenticator Control
+ --
+
+ regCtrl-authenticator ATTRIBUTE ::=
+ { TYPE Authenticator IDENTIFIED BY id-regCtrl-authenticator }
+
+ id-regCtrl-authenticator OBJECT IDENTIFIER ::= { id-regCtrl 2 }
+
+ Authenticator ::= UTF8String
+
+ --
+ -- 6.3. Publication Information Control
+ --
+
+ regCtrl-pkiPublicationInfo ATTRIBUTE ::=
+ { TYPE PKIPublicationInfo IDENTIFIED BY
+ id-regCtrl-pkiPublicationInfo }
+
+ id-regCtrl-pkiPublicationInfo OBJECT IDENTIFIER ::= { id-regCtrl 3 }
+
+ PKIPublicationInfo ::= SEQUENCE {
+ action INTEGER {
+ dontPublish (0),
+ pleasePublish (1) },
+ pubInfos SEQUENCE SIZE (1..MAX) OF SinglePubInfo OPTIONAL }
+ -- pubInfos MUST NOT be present if action is "dontPublish"
+ -- (if action is "pleasePublish" and pubInfos is omitted,
+ -- "dontCare" is assumed)
+
+ SinglePubInfo ::= SEQUENCE {
+ pubMethod INTEGER {
+ dontCare (0),
+ x500 (1),
+ web (2),
+ ldap (3) },
+ pubLocation GeneralName OPTIONAL }
+
+ --
+ -- 6.4. Archive Options Control
+ --
+
+ regCtrl-pkiArchiveOptions ATTRIBUTE ::=
+ { TYPE PKIArchiveOptions IDENTIFIED BY
+ id-regCtrl-pkiArchiveOptions }
+
+ id-regCtrl-pkiArchiveOptions OBJECT IDENTIFIER ::= { id-regCtrl 4 }
+
+ PKIArchiveOptions ::= CHOICE {
+ encryptedPrivKey [0] EncryptedKey,
+ -- the actual value of the private key
+ keyGenParameters [1] KeyGenParameters,
+ -- parameters that allow the private key to be re-generated
+ archiveRemGenPrivKey [2] BOOLEAN }
+ -- set to TRUE if sender wishes receiver to archive the private
+ -- key of a key pair that the receiver generates in response to
+ -- this request; set to FALSE if no archive is desired.
+
+ EncryptedKey ::= CHOICE {
+ encryptedValue EncryptedValue, -- Deprecated
+ envelopedData [0] EnvelopedData }
+ -- The encrypted private key MUST be placed in the envelopedData
+ -- encryptedContentInfo encryptedContent OCTET STRING.
+
+ --
+ -- We skipped doing the full constraints here since this structure
+ -- has been deprecated in favor of EnvelopedData
+ --
+
+ EncryptedValue ::= SEQUENCE {
+ intendedAlg [0] AlgorithmIdentifier{ALGORITHM, {...}} OPTIONAL,
+ -- the intended algorithm for which the value will be used
+ symmAlg [1] AlgorithmIdentifier{ALGORITHM, {...}} OPTIONAL,
+ -- the symmetric algorithm used to encrypt the value
+ encSymmKey [2] BIT STRING OPTIONAL,
+ -- the (encrypted) symmetric key used to encrypt the value
+ keyAlg [3] AlgorithmIdentifier{ALGORITHM, {...}} OPTIONAL,
+ -- algorithm used to encrypt the symmetric key
+ valueHint [4] OCTET STRING OPTIONAL,
+ -- a brief description or identifier of the encValue content
+ -- (may be meaningful only to the sending entity, and used only
+ -- if EncryptedValue might be re-examined by the sending entity
+ -- in the future)
+ encValue BIT STRING }
+ -- the encrypted value itself
+ -- When EncryptedValue is used to carry a private key (as opposed to
+ -- a certificate), implementations MUST support the encValue field
+ -- containing an encrypted PrivateKeyInfo as defined in [PKCS11],
+ -- section 12.11. If encValue contains some other format/encoding
+ -- for the private key, the first octet of valueHint MAY be used
+ -- to indicate the format/encoding (but note that the possible values
+ -- of this octet are not specified at this time). In all cases, the
+ -- intendedAlg field MUST be used to indicate at least the OID of
+ -- the intended algorithm of the private key, unless this information
+ -- is known a priori to both sender and receiver by some other means.
+
+ KeyGenParameters ::= OCTET STRING
+
+ --
+ -- 6.5. OldCert ID Control
+ --
+
+ regCtrl-oldCertID ATTRIBUTE ::=
+ { TYPE OldCertId IDENTIFIED BY id-regCtrl-oldCertID }
+
+ id-regCtrl-oldCertID OBJECT IDENTIFIER ::= { id-regCtrl 5 }
+
+ OldCertId ::= CertId
+
+ CertId ::= SEQUENCE {
+ issuer GeneralName,
+ serialNumber INTEGER }
+
+ --
+ -- 6.6. Protocol Encryption Key Control
+ --
+
+ regCtrl-protocolEncrKey ATTRIBUTE ::=
+ { TYPE ProtocolEncrKey IDENTIFIED BY id-regCtrl-protocolEncrKey }
+ id-regCtrl-protocolEncrKey OBJECT IDENTIFIER ::= { id-regCtrl 6 }
+
+ ProtocolEncrKey ::= SubjectPublicKeyInfo
+
+ --
+ -- 7. Registration Info in CRMF
+ --
+
+ id-regInfo OBJECT IDENTIFIER ::= { id-pkip 2 }
+
+ RegInfoSet ATTRIBUTE ::=
+ { regInfo-utf8Pairs | regInfo-certReq }
+
+ --
+ -- 7.1. utf8Pairs RegInfo Control
+ --
+
+ regInfo-utf8Pairs ATTRIBUTE ::=
+ { TYPE UTF8Pairs IDENTIFIED BY id-regInfo-utf8Pairs }
+
+ id-regInfo-utf8Pairs OBJECT IDENTIFIER ::= { id-regInfo 1 }
+ --with syntax
+ UTF8Pairs ::= UTF8String
+
+ --
+ -- 7.2. certReq RegInfo Control
+ --
+
+ regInfo-certReq ATTRIBUTE ::=
+ { TYPE CertReq IDENTIFIED BY id-regInfo-certReq }
+
+ id-regInfo-certReq OBJECT IDENTIFIER ::= { id-regInfo 2 }
+ --with syntax
+ CertReq ::= CertRequest
+
+ END
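
CRMF requests bottom out in CertReqMessages, a sequence of CertReqMsg values whose CertTemplate fields are all OPTIONAL. The sketch below builds a deliberately minimal request (one message, no proof-of-possession, no regInfo) and encodes it, again assuming a generated module named 'Rfc5912' with the maps back-end; a real request would normally carry at least subject and publicKey so that POPOSigningKey can be used.

    -module(crmf_sketch).
    -export([min_request/0]).

    min_request() ->
        %% Time is a CHOICE, so the alternative is tagged with an atom.
        Validity = #{notBefore => {generalTime, "20300101000000Z"}},
        Template = #{serialNumber => 1, validity => Validity},
        Req      = #{certReqId => 0, certTemplate => Template},
        Msgs     = [#{certReq => Req}],     % popo and regInfo are OPTIONAL
        {ok, Der} = 'Rfc5912':encode('CertReqMessages', Msgs),
        Der.
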
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Protected-Part-Descriptors.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Protected-Part-Descriptors.asn1
index 5512f1590b..5512f1590b 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Protected-Part-Descriptors.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Protected-Part-Descriptors.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/ProtocolObjectIdentifiers.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/ProtocolObjectIdentifiers.asn1
index d6e88a2e47..d6e88a2e47 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/ProtocolObjectIdentifiers.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/ProtocolObjectIdentifiers.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Raster-Gr-Coding-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Raster-Gr-Coding-Attributes.asn1
index 258c5f0b23..258c5f0b23 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Raster-Gr-Coding-Attributes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Raster-Gr-Coding-Attributes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Raster-Gr-Presentation-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Raster-Gr-Presentation-Attributes.asn1
index c8f3a2ff33..c8f3a2ff33 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Raster-Gr-Presentation-Attributes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Raster-Gr-Presentation-Attributes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Raster-Gr-Profile-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Raster-Gr-Profile-Attributes.asn1
index 365144ff35..365144ff35 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Raster-Gr-Profile-Attributes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Raster-Gr-Profile-Attributes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Reliable-Transfer-APDU.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Reliable-Transfer-APDU.asn1
index d00570b7e7..d00570b7e7 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Reliable-Transfer-APDU.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Reliable-Transfer-APDU.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Abstract-Syntaxes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Abstract-Syntaxes.asn1
index 4a59cc403b..4a59cc403b 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Abstract-Syntaxes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Abstract-Syntaxes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Generic-ROS-PDUs.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Generic-ROS-PDUs.asn1
index e55ea3c05e..e55ea3c05e 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Generic-ROS-PDUs.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Generic-ROS-PDUs.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Information-Objects-extensions.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Information-Objects-extensions.asn1
index 671cf0e780..671cf0e780 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Information-Objects-extensions.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Information-Objects-extensions.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Information-Objects.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Information-Objects.asn1
index b497e4126b..b497e4126b 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Information-Objects.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Information-Objects.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Realizations.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Realizations.asn1
index 73b49c8d7a..73b49c8d7a 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Realizations.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Realizations.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Useful-Definitions.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Useful-Definitions.asn1
index e526ff4600..e526ff4600 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Useful-Definitions.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Useful-Definitions.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/SCVP-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/SCVP-2009.asn1
new file mode 100644
index 0000000000..f74f76ff7c
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/SCVP-2009.asn1
@@ -0,0 +1,608 @@
+ SCVP-2009
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-scvp-02(52) }
+ DEFINITIONS IMPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ Extensions{}, EXTENSION, ATTRIBUTE
+ FROM PKIX-CommonTypes-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57) }
+
+ AlgorithmIdentifier{}, SIGNATURE-ALGORITHM, PUBLIC-KEY, KEY-AGREE,
+ DIGEST-ALGORITHM, KEY-DERIVATION, MAC-ALGORITHM
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ Certificate, CertificateList, CertificateSerialNumber,
+ SignatureAlgorithms, SubjectPublicKeyInfo
+ FROM PKIX1Explicit-2009
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-explicit-02(51) }
+
+ GeneralNames, GeneralName, KeyUsage, KeyPurposeId
+ FROM PKIX1Implicit-2009
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-implicit-02(59) }
+
+ AttributeCertificate
+ FROM PKIXAttributeCertificate-2009
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-attribute-cert-02(47) }
+
+ OCSPResponse
+ FROM OCSP-2009
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-ocsp-02(48) }
+
+ ContentInfo, CONTENT-TYPE
+ FROM CryptographicMessageSyntax-2009
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-cms-2004-02(41) }
+
+ mda-sha1
+ FROM PKIXAlgs-2009
+ { iso(1) identified-organization(3) dod(6)
+ internet(1) security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-algorithms2008-02(56) } ;
+
+ ContentTypes CONTENT-TYPE ::= {ct-scvp-certValRequest |
+ ct-scvp-certValResponse | ct-scvp-valPolRequest |
+ ct-scvp-valPolResponse, ... }
+
+ id-ct OBJECT IDENTIFIER ::=
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs9(9)
+ id-smime(16) 1 }
+
+ ct-scvp-certValRequest CONTENT-TYPE ::=
+ { CVRequest IDENTIFIED BY id-ct-scvp-certValRequest }
+
+ id-ct-scvp-certValRequest OBJECT IDENTIFIER ::= { id-ct 10 }
+
+ -- SCVP Certificate Validation Request
+
+ CVRequest ::= SEQUENCE {
+ cvRequestVersion INTEGER DEFAULT 1,
+ query Query,
+ requestorRef [0] GeneralNames OPTIONAL,
+ requestNonce [1] OCTET STRING OPTIONAL,
+ requestorName [2] GeneralName OPTIONAL,
+ responderName [3] GeneralName OPTIONAL,
+ requestExtensions [4] Extensions{{RequestExtensions}}
+ OPTIONAL,
+ signatureAlg [5] AlgorithmIdentifier
+ {SIGNATURE-ALGORITHM,
+ {SignatureAlgorithms}}
+ OPTIONAL,
+ hashAlg [6] OBJECT IDENTIFIER OPTIONAL,
+ requestorText [7] UTF8String (SIZE (1..256)) OPTIONAL
+ }
+
+ -- Set of signature algorithms is coming from RFC 5280
+ -- SignatureAlgorithms SIGNATURE-ALGORITHM ::= {...}
+
+ -- Add supported request extensions here; all new items should
+ -- be added after the extension marker
+
+ RequestExtensions EXTENSION ::= {...}
+
+ Query ::= SEQUENCE {
+ queriedCerts CertReferences,
+ checks CertChecks,
+ wantBack [1] WantBack OPTIONAL,
+ validationPolicy ValidationPolicy,
+ responseFlags ResponseFlags OPTIONAL,
+ serverContextInfo [2] OCTET STRING OPTIONAL,
+ validationTime [3] GeneralizedTime OPTIONAL,
+ intermediateCerts [4] CertBundle OPTIONAL,
+ revInfos [5] RevocationInfos OPTIONAL,
+ producedAt [6] GeneralizedTime OPTIONAL,
+ queryExtensions [7] Extensions{{QueryExtensions}} OPTIONAL
+ }
+
+ -- Add supported query extensions here; all new items should be added
+ -- after the extension marker
+
+ QueryExtensions EXTENSION ::= {...}
+
+ CertReferences ::= CHOICE {
+ pkcRefs [0] SEQUENCE SIZE (1..MAX) OF PKCReference,
+ acRefs [1] SEQUENCE SIZE (1..MAX) OF ACReference
+ }
+
+ CertReference::= CHOICE {
+ pkc PKCReference,
+ ac ACReference
+ }
+
+ PKCReference ::= CHOICE {
+ cert [0] Certificate,
+ pkcRef [1] SCVPCertID
+ }
+
+ ACReference ::= CHOICE {
+ attrCert [2] AttributeCertificate,
+ acRef [3] SCVPCertID
+ }
+
+ HashAlgorithm ::= AlgorithmIdentifier{DIGEST-ALGORITHM,
+ {mda-sha1, ...}}
+
+ SCVPCertID ::= SEQUENCE {
+ certHash OCTET STRING,
+ issuerSerial SCVPIssuerSerial,
+ hashAlgorithm HashAlgorithm
+ DEFAULT { algorithm mda-sha1.&id }
+ }
+
+ SCVPIssuerSerial ::= SEQUENCE {
+ issuer GeneralNames,
+ serialNumber CertificateSerialNumber
+ }
+
+ ValidationPolicy ::= SEQUENCE {
+ validationPolRef ValidationPolRef,
+ validationAlg [0] ValidationAlg OPTIONAL,
+ userPolicySet [1] SEQUENCE SIZE (1..MAX) OF OBJECT
+ IDENTIFIER OPTIONAL,
+ inhibitPolicyMapping [2] BOOLEAN OPTIONAL,
+ requireExplicitPolicy [3] BOOLEAN OPTIONAL,
+ inhibitAnyPolicy [4] BOOLEAN OPTIONAL,
+ trustAnchors [5] TrustAnchors OPTIONAL,
+ keyUsages [6] SEQUENCE OF KeyUsage OPTIONAL,
+ extendedKeyUsages [7] SEQUENCE OF KeyPurposeId OPTIONAL,
+ specifiedKeyUsages [8] SEQUENCE OF KeyPurposeId OPTIONAL
+ }
+
+ CertChecks ::= SEQUENCE SIZE (1..MAX) OF
+ OBJECT IDENTIFIER (CertCheckSet | ACertCheckSet, ... )
+
+ WantBack ::= SEQUENCE SIZE (1..MAX) OF
+ WANT-BACK.&id ({AllWantBacks})
+
+ POLICY ::= ATTRIBUTE
+
+ ValidationPolRefSet POLICY ::= {
+ svp-defaultValPolicy, ...
+ }
+
+ ValidationPolRef ::= SEQUENCE {
+ valPolId POLICY.&id,
+ valPolParams POLICY.&Type OPTIONAL
+ }
+
+ ValidationAlgSet POLICY ::= {
+ svp-basicValAlg, ...
+ }
+
+ ValidationAlg ::= SEQUENCE {
+ valAlgId POLICY.&id,
+ parameters POLICY.&Type OPTIONAL
+ }
+
+ NameValidationAlgSet POLICY ::= {
+ svp-nameValAlg, ...
+ }
+
+ NameValidationAlgParams ::= SEQUENCE {
+ nameCompAlgId OBJECT IDENTIFIER (NameCompAlgSet, ... ),
+ validationNames GeneralNames
+ }
+
+ TrustAnchors ::= SEQUENCE SIZE (1..MAX) OF PKCReference
+ KeyAgreePublicKey ::= SEQUENCE {
+ algorithm AlgorithmIdentifier{KEY-AGREE,
+ {SupportedKeyAgreePublicKeys}},
+ publicKey BIT STRING,
+ macAlgorithm AlgorithmIdentifier{MAC-ALGORITHM,
+ {SupportedMACAlgorithms}},
+ kDF AlgorithmIdentifier{KEY-DERIVATION,
+ {SupportedKeyDerivationFunctions}}
+ OPTIONAL
+ }
+
+ SupportedKeyAgreePublicKeys KEY-AGREE ::= {...}
+ SupportedMACAlgorithms MAC-ALGORITHM ::= {...}
+ SupportedKeyDerivationFunctions KEY-DERIVATION ::= {...}
+
+ ResponseFlags ::= SEQUENCE {
+ fullRequestInResponse [0] BOOLEAN DEFAULT FALSE,
+ responseValidationPolByRef [1] BOOLEAN DEFAULT TRUE,
+ protectResponse [2] BOOLEAN DEFAULT TRUE,
+ cachedResponse [3] BOOLEAN DEFAULT TRUE
+ }
+
+ CertBundle ::= SEQUENCE SIZE (1..MAX) OF Certificate
+
+ RevocationInfos ::= SEQUENCE SIZE (1..MAX) OF RevocationInfo
+
+ RevocationInfo ::= CHOICE {
+ crl [0] CertificateList,
+ delta-crl [1] CertificateList,
+ ocsp [2] OCSPResponse,
+ other [3] OtherRevInfo
+ }
+
+ REV-INFO ::= TYPE-IDENTIFIER
+
+ OtherRevInfo ::= SEQUENCE {
+ riType REV-INFO.&id,
+ riValue REV-INFO.&Type
+ }
+
+ -- SCVP Certificate Validation Response
+
+ ct-scvp-certValResponse CONTENT-TYPE ::=
+ { CVResponse IDENTIFIED BY id-ct-scvp-certValResponse }
+
+ id-ct-scvp-certValResponse OBJECT IDENTIFIER ::= { id-ct 11 }
+
+ CVResponse ::= SEQUENCE {
+ cvResponseVersion INTEGER,
+ serverConfigurationID INTEGER,
+ producedAt GeneralizedTime,
+ responseStatus ResponseStatus,
+ respValidationPolicy [0] RespValidationPolicy OPTIONAL,
+ requestRef [1] RequestReference OPTIONAL,
+ requestorRef [2] GeneralNames OPTIONAL,
+ requestorName [3] GeneralNames OPTIONAL,
+ replyObjects [4] ReplyObjects OPTIONAL,
+ respNonce [5] OCTET STRING OPTIONAL,
+ serverContextInfo [6] OCTET STRING OPTIONAL,
+ cvResponseExtensions [7] Extensions{{CVResponseExtensions}}
+ OPTIONAL,
+ requestorText [8] UTF8String (SIZE (1..256)) OPTIONAL
+ }
+
+ -- This document defines no extensions
+ CVResponseExtensions EXTENSION ::= {...}
+
+ ResponseStatus ::= SEQUENCE {
+ statusCode CVStatusCode DEFAULT okay,
+ errorMessage UTF8String OPTIONAL
+ }
+
+ CVStatusCode ::= ENUMERATED {
+ okay (0),
+ skipUnrecognizedItems (1),
+ tooBusy (10),
+ invalidRequest (11),
+ internalError (12),
+ badStructure (20),
+ unsupportedVersion (21),
+ abortUnrecognizedItems (22),
+ unrecognizedSigKey (23),
+ badSignatureOrMAC (24),
+ unableToDecode (25),
+ notAuthorized (26),
+ unsupportedChecks (27),
+ unsupportedWantBacks (28),
+ unsupportedSignatureOrMAC (29),
+ invalidSignatureOrMAC (30),
+ protectedResponseUnsupported (31),
+ unrecognizedResponderName (32),
+ relayingLoop (40),
+ unrecognizedValPol (50),
+ unrecognizedValAlg (51),
+ fullRequestInResponseUnsupported (52),
+ fullPolResponseUnsupported (53),
+ inhibitPolicyMappingUnsupported (54),
+ requireExplicitPolicyUnsupported (55),
+ inhibitAnyPolicyUnsupported (56),
+ validationTimeUnsupported (57),
+ unrecognizedCritQueryExt (63),
+ unrecognizedCritRequestExt (64),
+ ...
+ }
+
+ RespValidationPolicy ::= ValidationPolicy
+
+ RequestReference ::= CHOICE {
+ requestHash [0] HashValue, -- hash of CVRequest
+ fullRequest [1] CVRequest }
+
+ HashValue ::= SEQUENCE {
+ algorithm HashAlgorithm
+ DEFAULT { algorithm mda-sha1.&id },
+ value OCTET STRING }
+
+ ReplyObjects ::= SEQUENCE SIZE (1..MAX) OF CertReply
+
+ CertReply ::= SEQUENCE {
+ cert CertReference,
+ replyStatus ReplyStatus DEFAULT success,
+ replyValTime GeneralizedTime,
+ replyChecks ReplyChecks,
+ replyWantBacks ReplyWantBacks,
+ validationErrors [0] SEQUENCE SIZE (1..MAX) OF
+ OBJECT IDENTIFIER ( BasicValidationErrorSet |
+ NameValidationErrorSet,
+ ... ) OPTIONAL,
+ nextUpdate [1] GeneralizedTime OPTIONAL,
+ certReplyExtensions [2] Extensions{{...}} OPTIONAL
+ }
+
+ ReplyStatus ::= ENUMERATED {
+ success (0),
+ malformedPKC (1),
+ malformedAC (2),
+ unavailableValidationTime (3),
+ referenceCertHashFail (4),
+ certPathConstructFail (5),
+ certPathNotValid (6),
+ certPathNotValidNow (7),
+ wantBackUnsatisfied (8)
+ }
+ ReplyChecks ::= SEQUENCE OF ReplyCheck
+
+ ReplyCheck ::= SEQUENCE {
+ check OBJECT IDENTIFIER (CertCheckSet | ACertCheckSet, ... ),
+ status INTEGER DEFAULT 0
+ }
+
+ ReplyWantBacks ::= SEQUENCE OF ReplyWantBack
+
+ ReplyWantBack::= SEQUENCE {
+ wb WANT-BACK.&id({AllWantBacks}),
+ value OCTET STRING
+ (CONTAINING WANT-BACK.&Type({AllWantBacks}{@wb}))
+ }
+
+ WANT-BACK ::= TYPE-IDENTIFIER
+
+ AllWantBacks WANT-BACK ::= {
+ WantBackSet | ACertWantBackSet | AnyWantBackSet, ...
+ }
+
+ CertBundles ::= SEQUENCE SIZE (1..MAX) OF CertBundle
+
+ RevInfoWantBack ::= SEQUENCE {
+ revocationInfo RevocationInfos,
+ extraCerts CertBundle OPTIONAL
+ }
+
+ SCVPResponses ::= SEQUENCE OF ContentInfo
+
+ -- SCVP Validation Policies Request
+
+ ct-scvp-valPolRequest CONTENT-TYPE ::=
+ { ValPolRequest IDENTIFIED BY id-ct-scvp-valPolRequest }
+
+ id-ct-scvp-valPolRequest OBJECT IDENTIFIER ::= { id-ct 12 }
+
+ ValPolRequest ::= SEQUENCE {
+ vpRequestVersion INTEGER DEFAULT 1,
+ requestNonce OCTET STRING
+ }
+
+ -- SCVP Validation Policies Response
+
+ ct-scvp-valPolResponse CONTENT-TYPE ::=
+ { ValPolResponse IDENTIFIED BY id-ct-scvp-valPolResponse }
+
+ id-ct-scvp-valPolResponse OBJECT IDENTIFIER ::= { id-ct 13 }
+ ValPolResponse ::= SEQUENCE {
+ vpResponseVersion INTEGER,
+ maxCVRequestVersion INTEGER,
+ maxVPRequestVersion INTEGER,
+ serverConfigurationID INTEGER,
+ thisUpdate GeneralizedTime,
+ nextUpdate GeneralizedTime OPTIONAL,
+ supportedChecks CertChecks,
+ supportedWantBacks WantBack,
+ validationPolicies SEQUENCE OF OBJECT IDENTIFIER,
+ validationAlgs SEQUENCE OF OBJECT IDENTIFIER,
+ authPolicies SEQUENCE OF AuthPolicy,
+ responseTypes ResponseTypes,
+ defaultPolicyValues RespValidationPolicy,
+ revocationInfoTypes RevocationInfoTypes,
+ signatureGeneration SEQUENCE OF AlgorithmIdentifier
+ {SIGNATURE-ALGORITHM,
+ {SignatureAlgorithms}},
+ signatureVerification SEQUENCE OF AlgorithmIdentifier
+ {SIGNATURE-ALGORITHM,
+ {SignatureAlgorithms}},
+ hashAlgorithms SEQUENCE SIZE (1..MAX) OF
+ OBJECT IDENTIFIER,
+ serverPublicKeys SEQUENCE OF KeyAgreePublicKey
+ OPTIONAL,
+ clockSkew INTEGER DEFAULT 10,
+ requestNonce OCTET STRING OPTIONAL
+ }
+
+ ResponseTypes ::= ENUMERATED {
+ cached-only (0),
+ non-cached-only (1),
+ cached-and-non-cached (2)
+ }
+
+ RevocationInfoTypes ::= BIT STRING {
+ fullCRLs (0),
+ deltaCRLs (1),
+ indirectCRLs (2),
+ oCSPResponses (3)
+ }
+
+ AuthPolicy ::= OBJECT IDENTIFIER
+
+ -- SCVP Check Identifiers
+
+ id-stc OBJECT IDENTIFIER ::=
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) 17 }
+
+ CertCheckSet OBJECT IDENTIFIER ::= {
+ id-stc-build-pkc-path | id-stc-build-valid-pkc-path |
+ id-stc-build-status-checked-pkc-path, ... }
+
+ id-stc-build-pkc-path OBJECT IDENTIFIER ::= { id-stc 1 }
+ id-stc-build-valid-pkc-path OBJECT IDENTIFIER ::= { id-stc 2 }
+ id-stc-build-status-checked-pkc-path
+ OBJECT IDENTIFIER ::= { id-stc 3 }
+
+ ACertCheckSet OBJECT IDENTIFIER ::= {
+ id-stc-build-aa-path | id-stc-build-valid-aa-path |
+ id-stc-build-status-checked-aa-path |
+ id-stc-status-check-ac-and-build-status-checked-aa-path
+ }
+
+ id-stc-build-aa-path OBJECT IDENTIFIER ::= { id-stc 4 }
+ id-stc-build-valid-aa-path OBJECT IDENTIFIER ::= { id-stc 5 }
+ id-stc-build-status-checked-aa-path
+ OBJECT IDENTIFIER ::= { id-stc 6 }
+ id-stc-status-check-ac-and-build-status-checked-aa-path
+ OBJECT IDENTIFIER ::= { id-stc 7 }
+
+ -- SCVP WantBack Identifiers
+
+ id-swb OBJECT IDENTIFIER ::=
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) 18 }
+
+ WantBackSet WANT-BACK ::= {
+ swb-pkc-cert | swb-pkc-best-cert-path |
+ swb-pkc-revocation-info | swb-pkc-public-key-info |
+ swb-pkc-all-cert-paths | swb-pkc-ee-revocation-info |
+ swb-pkc-CAs-revocation-info
+ }
+
+ ACertWantBackSet WANT-BACK ::= {
+ swb-ac-cert | swb-aa-cert-path |
+ swb-aa-revocation-info | swb-ac-revocation-info
+ }
+
+ AnyWantBackSet WANT-BACK ::= { swb-relayed-responses }
+
+ swb-pkc-best-cert-path WANT-BACK ::=
+ { CertBundle IDENTIFIED BY id-swb-pkc-best-cert-path }
+ id-swb-pkc-best-cert-path OBJECT IDENTIFIER ::= { id-swb 1 }
+ swb-pkc-revocation-info WANT-BACK ::=
+ { RevInfoWantBack IDENTIFIED BY id-swb-pkc-revocation-info }
+ id-swb-pkc-revocation-info OBJECT IDENTIFIER ::= { id-swb 2 }
+
+ swb-pkc-public-key-info WANT-BACK ::=
+ { SubjectPublicKeyInfo IDENTIFIED BY id-swb-pkc-public-key-info }
+ id-swb-pkc-public-key-info OBJECT IDENTIFIER ::= { id-swb 4 }
+
+ swb-aa-cert-path WANT-BACK ::=
+ {CertBundle IDENTIFIED BY id-swb-aa-cert-path }
+ id-swb-aa-cert-path OBJECT IDENTIFIER ::= { id-swb 5 }
+
+ swb-aa-revocation-info WANT-BACK ::=
+ { RevInfoWantBack IDENTIFIED BY id-swb-aa-revocation-info }
+ id-swb-aa-revocation-info OBJECT IDENTIFIER ::= { id-swb 6 }
+
+ swb-ac-revocation-info WANT-BACK ::=
+ { RevInfoWantBack IDENTIFIED BY id-swb-ac-revocation-info }
+ id-swb-ac-revocation-info OBJECT IDENTIFIER ::= { id-swb 7 }
+
+ swb-relayed-responses WANT-BACK ::=
+ {SCVPResponses IDENTIFIED BY id-swb-relayed-responses }
+
+ id-swb-relayed-responses OBJECT IDENTIFIER ::= { id-swb 9 }
+
+ swb-pkc-all-cert-paths WANT-BACK ::=
+ {CertBundles IDENTIFIED BY id-swb-pkc-all-cert-paths }
+ id-swb-pkc-all-cert-paths OBJECT IDENTIFIER ::= { id-swb 12}
+
+ swb-pkc-ee-revocation-info WANT-BACK ::=
+ { RevInfoWantBack IDENTIFIED BY id-swb-pkc-ee-revocation-info }
+ id-swb-pkc-ee-revocation-info OBJECT IDENTIFIER ::= { id-swb 13}
+
+ swb-pkc-CAs-revocation-info WANT-BACK ::=
+ { RevInfoWantBack IDENTIFIED BY id-swb-pkc-CAs-revocation-info }
+ id-swb-pkc-CAs-revocation-info OBJECT IDENTIFIER ::= { id-swb 14}
+
+ swb-pkc-cert WANT-BACK ::=
+ { Certificate IDENTIFIED BY id-swb-pkc-cert }
+ id-swb-pkc-cert OBJECT IDENTIFIER ::= { id-swb 10}
+
+ swb-ac-cert WANT-BACK ::=
+ { AttributeCertificate IDENTIFIED BY id-swb-ac-cert }
+ id-swb-ac-cert OBJECT IDENTIFIER ::= { id-swb 11}
+
+ -- SCVP Validation Policy and Algorithm Identifiers
+
+ id-svp OBJECT IDENTIFIER ::=
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) 19 }
+
+ svp-defaultValPolicy POLICY ::=
+ { IDENTIFIED BY id-svp-defaultValPolicy }
+
+ id-svp-defaultValPolicy OBJECT IDENTIFIER ::= { id-svp 1 }
+
+ -- SCVP Basic Validation Algorithm Identifier
+
+ svp-basicValAlg POLICY ::= {IDENTIFIED BY id-svp-basicValAlg }
+
+ id-svp-basicValAlg OBJECT IDENTIFIER ::= { id-svp 3 }
+
+ -- SCVP Basic Validation Algorithm Errors
+
+ id-bvae OBJECT IDENTIFIER ::= id-svp-basicValAlg
+
+ BasicValidationErrorSet OBJECT IDENTIFIER ::= {
+ id-bvae-expired | id-bvae-not-yet-valid |
+ id-bvae-wrongTrustAnchor | id-bvae-noValidCertPath |
+ id-bvae-revoked | id-bvae-invalidKeyPurpose |
+ id-bvae-invalidKeyUsage | id-bvae-invalidCertPolicy
+ }
+
+ id-bvae-expired OBJECT IDENTIFIER ::= { id-bvae 1 }
+ id-bvae-not-yet-valid OBJECT IDENTIFIER ::= { id-bvae 2 }
+ id-bvae-wrongTrustAnchor OBJECT IDENTIFIER ::= { id-bvae 3 }
+ id-bvae-noValidCertPath OBJECT IDENTIFIER ::= { id-bvae 4 }
+ id-bvae-revoked OBJECT IDENTIFIER ::= { id-bvae 5 }
+ id-bvae-invalidKeyPurpose OBJECT IDENTIFIER ::= { id-bvae 9 }
+ id-bvae-invalidKeyUsage OBJECT IDENTIFIER ::= { id-bvae 10 }
+ id-bvae-invalidCertPolicy OBJECT IDENTIFIER ::= { id-bvae 11 }
+
+ -- SCVP Name Validation Algorithm Identifier
+
+ svp-nameValAlg POLICY ::=
+ {TYPE NameValidationAlgParams IDENTIFIED BY id-svp-nameValAlg }
+
+ id-svp-nameValAlg OBJECT IDENTIFIER ::= { id-svp 2 }
+
+ -- SCVP Name Validation Algorithm DN comparison algorithm
+
+ NameCompAlgSet OBJECT IDENTIFIER ::= {
+ id-nva-dnCompAlg
+ }
+
+ id-nva-dnCompAlg OBJECT IDENTIFIER ::= { id-svp 4 }
+ -- SCVP Name Validation Algorithm Errors
+
+ id-nvae OBJECT IDENTIFIER ::= id-svp-nameValAlg
+
+ NameValidationErrorSet OBJECT IDENTIFIER ::= {
+ id-nvae-name-mismatch | id-nvae-no-name | id-nvae-unknown-alg |
+ id-nvae-bad-name | id-nvae-bad-name-type | id-nvae-mixed-names
+ }
+
+ id-nvae-name-mismatch OBJECT IDENTIFIER ::= { id-nvae 1 }
+ id-nvae-no-name OBJECT IDENTIFIER ::= { id-nvae 2 }
+ id-nvae-unknown-alg OBJECT IDENTIFIER ::= { id-nvae 3 }
+ id-nvae-bad-name OBJECT IDENTIFIER ::= { id-nvae 4 }
+ id-nvae-bad-name-type OBJECT IDENTIFIER ::= { id-nvae 5 }
+ id-nvae-mixed-names OBJECT IDENTIFIER ::= { id-nvae 6 }
+
+ -- SCVP Extended Key Usage Key Purpose Identifiers
+
+ id-kp OBJECT IDENTIFIER ::=
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) 3 }
+
+ SvcpExtKeyUsageSet OBJECT IDENTIFIER ::= {
+ id-kp-scvpServer | id-kp-scvpClient
+ }
+
+ id-kp-scvpServer OBJECT IDENTIFIER ::= { id-kp 15 }
+
+ id-kp-scvpClient OBJECT IDENTIFIER ::= { id-kp 16 }
+
+ END
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/SecureMimeMessageV3dot1-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/SecureMimeMessageV3dot1-2009.asn1
new file mode 100644
index 0000000000..2bd2aaa435
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/SecureMimeMessageV3dot1-2009.asn1
@@ -0,0 +1,122 @@
+ SecureMimeMessageV3dot1-2009
+ {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-msg-v3dot1-02(39)}
+ DEFINITIONS IMPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ SMIME-CAPS, SMIMECapabilities{}
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ ATTRIBUTE
+ FROM PKIX-CommonTypes-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57)}
+
+ SubjectKeyIdentifier, IssuerAndSerialNumber, RecipientKeyIdentifier
+ FROM CryptographicMessageSyntax-2009
+ {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-cms-2004-02(41)}
+
+ rc2-cbc, SMimeCaps
+ FROM CryptographicMessageSyntaxAlgorithms-2009
+ {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-cmsalg-2001-02(37)}
+
+ SMimeCaps
+ FROM PKIXAlgs-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-algorithms2008-02(56)}
+
+ SMimeCaps
+ FROM PKIX1-PSS-OAEP-Algorithms-2009
+ {iso(1) identified-organization(3) dod(6) internet(1)
+ security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-rsa-pkalgs-02(54)};
+
+ SMimeAttributeSet ATTRIBUTE ::=
+ { aa-smimeCapabilities | aa-encrypKeyPref, ... }
+
+ -- id-aa is the arc with all new authenticated and unauthenticated
+ -- attributes produced by the S/MIME Working Group
+
+ id-aa OBJECT IDENTIFIER ::=
+ { iso(1) member-body(2) usa(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) attributes(2)}
+
+ -- The S/MIME Capabilities attribute provides a method of broadcasting
+ -- the symmetric capabilities understood. Algorithms SHOULD be ordered
+ -- by preference and grouped by type
+
+ aa-smimeCapabilities ATTRIBUTE ::=
+ { TYPE SMIMECapabilities{{SMimeCapsSet}} IDENTIFIED BY
+ smimeCapabilities }
+ smimeCapabilities OBJECT IDENTIFIER ::=
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ 15 }
+
+ SMimeCapsSet SMIME-CAPS ::=
+ { cap-preferBinaryInside | cap-RC2CBC |
+ PKIXAlgs-2009.SMimeCaps |
+ CryptographicMessageSyntaxAlgorithms-2009.SMimeCaps |
+ PKIX1-PSS-OAEP-Algorithms-2009.SMimeCaps, ... }
+
+ -- Encryption Key Preference provides a method of broadcasting the
+ -- preferred encryption certificate.
+
+ aa-encrypKeyPref ATTRIBUTE ::=
+ { TYPE SMIMEEncryptionKeyPreference
+ IDENTIFIED BY id-aa-encrypKeyPref }
+
+ id-aa-encrypKeyPref OBJECT IDENTIFIER ::= {id-aa 11}
+
+ SMIMEEncryptionKeyPreference ::= CHOICE {
+ issuerAndSerialNumber [0] IssuerAndSerialNumber,
+ receipentKeyId [1] RecipientKeyIdentifier,
+ subjectAltKeyIdentifier [2] SubjectKeyIdentifier
+ }
+
+ -- receipentKeyId is spelt incorrectly, but kept for historical
+ -- reasons.
+
+ id-smime OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs9(9) 16 }
+
+ id-cap OBJECT IDENTIFIER ::= { id-smime 11 }
+
+ -- The preferBinaryInside indicates an ability to receive messages
+ -- with binary encoding inside the CMS wrapper
+
+ cap-preferBinaryInside SMIME-CAPS ::=
+ { -- No value -- IDENTIFIED BY id-cap-preferBinaryInside }
+
+ id-cap-preferBinaryInside OBJECT IDENTIFIER ::= { id-cap 1 }
+
+ -- The following list OIDs to be used with S/MIME V3
+
+ -- Signature Algorithms Not Found in [RFC3370]
+ --
+ -- md2WithRSAEncryption OBJECT IDENTIFIER ::=
+ -- {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-1(1)
+ -- 2}
+ --
+ -- Other Signed Attributes
+ --
+ -- signingTime OBJECT IDENTIFIER ::=
+ -- {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ -- 5}
+ -- See [RFC5652] for a description of how to encode the attribute
+ -- value.
+
+ cap-RC2CBC SMIME-CAPS ::=
+ { TYPE SMIMECapabilitiesParametersForRC2CBC
+ IDENTIFIED BY rc2-cbc}
+
+ SMIMECapabilitiesParametersForRC2CBC ::= INTEGER (40 | 128, ...)
+ -- (RC2 Key Length (number of bits))
+
+ END
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/SelectedAttributeTypes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/SelectedAttributeTypes.asn1
index 07bba30690..07bba30690 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/SelectedAttributeTypes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/SelectedAttributeTypes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/SeseAPDUs.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/SeseAPDUs.asn1
index 2917122e94..2917122e94 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/SeseAPDUs.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/SeseAPDUs.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/SpkmGssTokens.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/SpkmGssTokens.asn1
index 02205bd64c..02205bd64c 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/SpkmGssTokens.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/SpkmGssTokens.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Style-Descriptors.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Style-Descriptors.asn1
index 8f033eab6f..8f033eab6f 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Style-Descriptors.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Style-Descriptors.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Subprofiles.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Subprofiles.asn1
index bfcd0b5dbc..bfcd0b5dbc 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Subprofiles.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Subprofiles.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Temporal-Relationships.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Temporal-Relationships.asn1
index 9633995e3b..9633995e3b 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Temporal-Relationships.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Temporal-Relationships.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Text-Units.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Text-Units.asn1
index ccc64a52f5..ccc64a52f5 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Text-Units.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Text-Units.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/UpperBounds.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/UpperBounds.asn1
index c97c83a569..c97c83a569 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/UpperBounds.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/UpperBounds.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/UsefulDefinitions.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/UsefulDefinitions.asn1
index d9601bb7d0..d9601bb7d0 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/UsefulDefinitions.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/UsefulDefinitions.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Videotex-Coding-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Videotex-Coding-Attributes.asn1
index 18e51cbc0d..18e51cbc0d 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Videotex-Coding-Attributes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Videotex-Coding-Attributes.asn1
diff --git a/lib/asn1/test/asn1_test_lib.erl b/lib/asn1/test/asn1_test_lib.erl
index da07cd1118..a5f46046ff 100644
--- a/lib/asn1/test/asn1_test_lib.erl
+++ b/lib/asn1/test/asn1_test_lib.erl
@@ -21,6 +21,7 @@
-export([compile/3,compile_all/3,compile_erlang/3,
hex_to_bin/1,
+ match_value/2,
parallel/0,
roundtrip/3,roundtrip/4,roundtrip_enc/3,roundtrip_enc/4]).
@@ -106,6 +107,24 @@ compile_erlang(Mod, Config, Options) ->
hex_to_bin(S) ->
<< <<(hex2num(C)):4>> || C <- S, C =/= $\s >>.
+%% match_value(Pattern, Value) -> ok.
+%% Match Pattern against Value. If the Pattern contains '_' in any
+%% position, the corresponding position in the Value can be
+%% anything. Generate an exception if the Pattern and Value don't
+%% match.
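+%%
+%% A minimal usage sketch (hypothetical values, based on the clauses
+%% below; '_' acts as a wildcard at any depth):
+%%
+%%   ok = match_value({'Seq','_',42}, {'Seq',<<"abc">>,42}),
+%%   ok = match_value(['_',b], [a,b]),
+%%   {nomatch,1,2} = try match_value(1, 2)
+%%                   catch error:Reason -> Reason end.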
+
+match_value('_', _) ->
+ ok;
+match_value([H1|T1], [H2|T2]) ->
+ match_value(H1, H2),
+ match_value(T1, T2);
+match_value(T1, T2) when tuple_size(T1) =:= tuple_size(T2) ->
+ match_value_tuple(1, T1, T2);
+match_value(Same, Same) ->
+ ok;
+match_value(V1, V2) ->
+ error({nomatch,V1,V2}).
+
roundtrip(Mod, Type, Value) ->
roundtrip(Mod, Type, Value, Value).
@@ -132,6 +151,12 @@ hex2num(C) when $0 =< C, C =< $9 -> C - $0;
hex2num(C) when $A =< C, C =< $F -> C - $A + 10;
hex2num(C) when $a =< C, C =< $f -> C - $a + 10.
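+%% match_value_tuple(Index, T1, T2)
+%%  Helper for match_value/2: compare two equal-sized tuples one
+%%  element at a time, starting at position Index.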
+match_value_tuple(I, T1, T2) when I =< tuple_size(T1) ->
+ match_value(element(I, T1), element(I, T2)),
+ match_value_tuple(I+1, T1, T2);
+match_value_tuple(_, _, _) ->
+ ok.
+
test_ber_indefinite(Mod, Type, Encoded, ExpectedValue) ->
case Mod:encoding_rule() of
ber ->
diff --git a/lib/asn1/test/error_SUITE.erl b/lib/asn1/test/error_SUITE.erl
index 1edd60f7c8..a9893b91cc 100644
--- a/lib/asn1/test/error_SUITE.erl
+++ b/lib/asn1/test/error_SUITE.erl
@@ -19,9 +19,12 @@
-module(error_SUITE).
-export([suite/0,all/0,groups/0,
- already_defined/1,bitstrings/1,enumerated/1,
- imports/1,instance_of/1,integers/1,objects/1,
- parameterization/1,values/1]).
+ already_defined/1,bitstrings/1,
+ classes/1,constraints/1,constructed/1,enumerated/1,
+ imports_exports/1,instance_of/1,integers/1,objects/1,
+ object_field_extraction/1,oids/1,rel_oids/1,
+ object_sets/1,parameterization/1,
+ syntax/1,table_constraints/1,tags/1,values/1]).
-include_lib("test_server/include/test_server.hrl").
@@ -34,12 +37,22 @@ groups() ->
[{p,parallel(),
[already_defined,
bitstrings,
+ classes,
+ constraints,
+ constructed,
enumerated,
- imports,
+ imports_exports,
instance_of,
integers,
objects,
+ object_field_extraction,
+ object_sets,
+ oids,
+ rel_oids,
parameterization,
+ syntax,
+ table_constraints,
+ tags,
values]}].
parallel() ->
@@ -94,6 +107,46 @@ bitstrings(Config) ->
]} = run(P, Config),
ok.
+classes(Config) ->
+ M = 'Classes',
+ P = {M,
+ <<"Classes DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
+ " LowerCase ::= CLASS { &id INTEGER UNIQUE }\n"
+ " CL ::= CLASS { &id INTEGER UNIQUE DEFAULT 42}\n"
+ "END\n">>},
+ {error,
+ [{structured_error,{M,2},asn1ct_check,
+ {illegal_class_name,'LowerCase'}},
+ {structured_error,{M,3},asn1ct_check,
+ {unique_and_default,id}}
+ ]} = run(P, Config),
+ ok.
+
+constraints(Config) ->
+ M = 'Constraints',
+ P = {M,
+ <<"Constraints DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
+ " II-1 ::= INTEGER (holder-1.&obj)\n"
+ " II-2 ::= INTEGER ('1234'H<..20)\n"
+ " II-3 ::= INTEGER (1..<\"abc\")\n"
+ " II-4 ::= INTEGER (10..1)\n"
+
+ " HOLDER ::= CLASS {\n"
+ " &obj HOLDER OPTIONAL\n"
+ " }\n"
+
+ " holder-1 HOLDER ::= { &obj holder-2 }\n"
+ " holder-2 HOLDER ::= { }\n"
+ "END\n">>},
+ {error,
+ [
+ {structured_error,{M,2},asn1ct_check,illegal_value},
+ {structured_error,{M,3},asn1ct_check,illegal_integer_value},
+ {structured_error,{M,4},asn1ct_check,illegal_integer_value},
+ {structured_error,{M,5},asn1ct_check,reversed_range}
+ ]} = run(P, Config),
+ ok.
+
enumerated(Config) ->
M = 'Enumerated',
P = {M,
@@ -111,38 +164,77 @@ enumerated(Config) ->
" S2 ::= SEQUENCE {\n"
" e2 EnumExt DEFAULT xyz\n"
" }\n"
+
+ " BadEnum1 ::= ENUMERATED {a, b, c, b }\n"
+ " BadEnum2 ::= ENUMERATED {a(1), b(2), b(3) }\n"
+ " BadEnum3 ::= ENUMERATED {a(1), b(1) }\n"
+ " BadEnum4 ::= ENUMERATED {a, b, ..., c(0) }\n"
+ " BadEnum5 ::= ENUMERATED {a, b, ..., c(10), d(5) }\n"
"END\n">>},
{error,
[
- {structured_error,{'Enumerated',3},asn1ct_check,{undefined,d}},
- {structured_error,{'Enumerated',5},asn1ct_check,{undefined,z}},
- {structured_error,{'Enumerated',10},asn1ct_check,{undefined,aa}},
- {structured_error,{'Enumerated',13},asn1ct_check,{undefined,xyz}}
+ {structured_error,{M,3},asn1ct_check,{undefined,d}},
+ {structured_error,{M,5},asn1ct_check,{undefined,z}},
+ {structured_error,{M,6},asn1ct_check,{undefined,aa}},
+ {structured_error,{M,12},asn1ct_check,{undefined,xyz}},
+ {structured_error,{M,15},asn1ct_check,
+ {enum_illegal_redefinition,b}},
+ {structured_error,{M,16},asn1ct_check,
+ {enum_illegal_redefinition,b}},
+ {structured_error,{M,17},asn1ct_check,
+ {enum_reused_value,b,1}},
+ {structured_error,{M,18},asn1ct_check,
+ {enum_reused_value,c,0}},
+ {structured_error,{M,19},asn1ct_check,
+ {enum_not_ascending,d,5,10}}
]
} = run(P, Config),
ok.
-imports(Config) ->
+imports_exports(Config) ->
Ext = 'ExternalModule',
ExtP = {Ext,
<<"ExternalModule DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
+ "IMPORTS\n"
+ " Int, NonExistingImport FROM ImportsFrom;\n"
+
+ " Existing ::= INTEGER\n"
"END\n">>},
- ok = run(ExtP, Config),
+ {error,
+ [{structured_error,
+ {Ext,3},
+ asn1ct_check,
+ {undefined_import,'NonExistingImport',
+ 'ImportsFrom'}}]} = run(ExtP, Config),
M = 'Imports',
P = {M,
<<"Imports DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
- "IMPORTS NotDefined FROM ExternalModule\n"
- "X FROM UndefinedModule objid\n"
- "Y, Z FROM UndefinedModule2;\n"
+ "EXPORTS\n"
+ " T, UndefinedType;\n"
+
+ "IMPORTS\n"
+ " NotDefined, Existing, Int, NonExistingImport\n"
+ " FROM ExternalModule\n"
+ " X FROM UndefinedModule objid\n"
+ " Y, Z FROM UndefinedModule2;\n"
+
"objid OBJECT IDENTIFIER ::= {joint-iso-ccitt(2) remote-operations(4)\n"
" notation(0)}\n"
+ "T ::= INTEGER\n"
"END\n">>},
- {error,[{structured_error,{M,2},asn1ct_check,
- {undefined_import,'NotDefined','ExternalModule'}},
- {structured_error,{M,3},asn1ct_check,{undefined_import,'X','UndefinedModule'}},
- {structured_error,{M,4},asn1ct_check,{undefined_import,'Y','UndefinedModule2'}},
- {structured_error,{M,4},asn1ct_check,{undefined_import,'Z','UndefinedModule2'}}
+ {error,[{structured_error,{M,3},asn1ct_check,
+ {undefined_export, 'UndefinedType'}},
+ {structured_error,{M,5},asn1ct_check,
+ {undefined_import,'NonExistingImport',Ext}},
+ {structured_error,{M,5},asn1ct_check,
+ {undefined_import,'NotDefined',Ext}},
+ {structured_error,{M,7},asn1ct_check,
+ {undefined_import,'X','UndefinedModule'}},
+ {structured_error,{M,8},asn1ct_check,
+ {undefined_import,'Y','UndefinedModule2'}},
+ {structured_error,{M,8},asn1ct_check,
+ {undefined_import,'Z','UndefinedModule2'}}
]} = run(P, Config),
ok.
@@ -170,11 +262,14 @@ integers(Config) ->
" Int1 ::= INTEGER {a(1), a(1)}\n"
" Int2 ::= INTEGER {a(1), b(2), a(3)}\n"
" Int3 ::= INTEGER {x(1), y(1)}\n"
+ " i0 INTEGER ::= 1\n"
+ " Int4 ::= INTEGER {x(i0), y(undef) }\n"
"END\n">>},
{error,
[{structured_error,{M,2},asn1ct_check,{namelist_redefinition,a}},
{structured_error,{M,3},asn1ct_check,{namelist_redefinition,a}},
- {structured_error,{M,4},asn1ct_check,{value_reused,1}}
+ {structured_error,{M,4},asn1ct_check,{value_reused,1}},
+ {structured_error,{M,6},asn1ct_check,{undefined,undef}}
]} = run(P, Config),
ok.
@@ -188,6 +283,11 @@ objects(Config) ->
" obj3 CL ::= { &Data OCTET STRING }\n"
" obj4 SMALL ::= { &code 42 }\n"
" InvalidSet CL ::= { obj1 }\n"
+ " obj5 CL ::= {}\n"
+ " ErrSet ::= PT{ {PT{inst}}}\n"
+ " obj6 CL ::= 7\n"
+ " obj7 CL ::= int\n"
+ " obj8 NON-CLASS ::= { &id 1 }\n"
" CL ::= CLASS {\n"
" &code INTEGER UNIQUE,\n"
@@ -203,6 +303,12 @@ objects(Config) ->
" &code INTEGER UNIQUE,\n"
" &i INTEGER\n"
" }\n"
+
+ " PT{SMALL:Small} ::= SEQUENCE { a SMALL.&code ({Small}) }\n"
+ " inst SMALL ::= {&code 42, &i 4711}\n"
+
+ " int INTEGER ::= 42\n"
+ " NON-CLASS ::= SEQUENCE { a BOOLEAN }\n"
"END\n">>},
{error,
[
@@ -216,24 +322,490 @@ objects(Config) ->
{structured_error,{M,5},asn1ct_check,
{missing_mandatory_fields,[i],obj4}},
{structured_error,{M,6},asn1ct_check,
- {invalid_fields,[wrong],'InvalidSet'}}
+ {invalid_fields,[wrong],'InvalidSet'}},
+ {structured_error,{M,7},asn1ct_check,
+ {missing_mandatory_fields,
+ ['Data','Set','VarTypeValue',code,enum,object,
+ vartypevalue],obj5}},
+ {structured_error,{M,8},asn1ct_check,invalid_objectset},
+ {structured_error,{M,9},asn1ct_check,illegal_object},
+ {structured_error,{M,10},asn1ct_check,illegal_object},
+ {structured_error,{M,11},asn1ct_check,illegal_object}
+ ]
+ } = run(P, Config),
+ ok.
+
+object_field_extraction(Config) ->
+ M = 'ObjectFieldExtraction',
+ P = {M,
+ <<"ObjectFieldExtraction DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
+
+ " DataObjSet DATA-CLASS ::= {\n"
+ " holder-object-1.&int,\n"
+ " ...\n"
+ " }\n"
+
+ " DataObjSetNoExt DATA-CLASS ::= {\n"
+ " holder-object-1.&int\n"
+ " }\n"
+
+ " holder-object-1 HOLDER-CLASS ::= {\n"
+ " &int 42\n"
+ " }\n"
+
+ " HOLDER-CLASS ::= CLASS {\n"
+ " &int INTEGER\n"
+ " }\n"
+
+ " DATA-CLASS ::= CLASS {\n"
+ " &id INTEGER\n"
+ " }\n"
+
+ "END\n">>},
+ {error,
+ [
+ {structured_error,{M,2},asn1ct_check,illegal_object},
+ {structured_error,{M,6},asn1ct_check,illegal_object}
+ ]
+ } = run(P, Config),
+ ok.
+
+object_sets(Config) ->
+ M = 'ObjectSets',
+ P = {M, <<"ObjectSets DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
+ "TEST-UNIQ ::= CLASS { &id INTEGER UNIQUE, &test INTEGER }\n"
+ "UniqSet TEST-UNIQ ::= { { &id 1, &test 1 } | {&id 1, &test 2} }\n"
+
+ "DOUBLE-UNIQ ::= CLASS { &id1 INTEGER UNIQUE,"
+ " &id INTEGER UNIQUE }\n"
+ "DoubleSet DOUBLE-UNIQ ::= { {&id1 1, &id2 2} }\n"
+ "END\n">>},
+ {error,
+ [{structured_error,{M,3},asn1ct_check,{non_unique_object,1}},
+ {structured_error,{M,5},asn1ct_check,multiple_uniqs}
+ ]
+ } = run(P, Config),
+ ok.
+
+oids(Config) ->
+ M = 'OIDS',
+ P = {M,<<"OIDS DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
+ "CONTAINER ::= CLASS { &id OBJECT IDENTIFIER UNIQUE,\n"
+ " &int INTEGER OPTIONAL,\n"
+ " &seq SEQUENCE { a INTEGER } OPTIONAL\n"
+ "}\n"
+
+ "-- This is line 6.\n"
+ "object-1 CONTAINER ::= { &id {1 2 3}, &int 42 }\n"
+ "object-2 CONTAINER ::= { &id {1 999}, &int 0 }\n"
+ "object-3 CONTAINER ::= { &id {1 2}, &seq { a 42 } }\n"
+ "oid-1 OBJECT IDENTIFIER ::= object-1.&int\n"
+ "oid-2 OBJECT IDENTIFIER ::= object-2.&id\n"
+ "oid-3 OBJECT IDENTIFIER ::= object-3.&seq\n"
+ "-- This is line 13.\n"
+
+ "oid-5 OBJECT IDENTIFIER ::= { a 42, b 19 }\n"
+
+ "oid-6 OBJECT IDENTIFIER ::= int\n"
+ "int INTEGER ::= 42\n"
+
+ "oid-7 OBJECT IDENTIFIER ::= seq\n"
+ "seq SEQUENCE { x INTEGER } ::= { x 11 }\n"
+
+ "oid-8 OBJECT IDENTIFIER ::= os\n"
+ "os OCTET STRING ::= '1234'H\n"
+
+ "oid-9 OBJECT IDENTIFIER ::= { 1 os }\n"
+
+ "oid-10 OBJECT IDENTIFIER ::= { 1 invalid }\n"
+
+ "-- This is line 23.\n"
+ "oid-11 OBJECT IDENTIFIER ::= { 0 legal-oid }\n"
+ "legal-oid OBJECT IDENTIFIER ::= {1 2 3}\n"
+
+ "bad-root-1 OBJECT IDENTIFIER ::= {99}\n"
+ "bad-root-2 OBJECT IDENTIFIER ::= {0 42}\n"
+
+ "oid-object-ref-1 OBJECT IDENTIFIER ::= object-1\n"
+ "oid-object-ref-2 OBJECT IDENTIFIER ::= { object-1 19 } \n"
+
+ "oid-int OBJECT IDENTIFIER ::= 42\n"
+ "oid-sequence OBJECT IDENTIFIER ::= {a 42, b 35}\n"
+
+ "END\n">>},
+ {error,
+ [
+ {structured_error,{M,8},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,10},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,11},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,12},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,14},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,15},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,17},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,19},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,21},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,22},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,24},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,26},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,27},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,28},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,29},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,30},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,31},asn1ct_check,{illegal_oid,o_id}}
]
} = run(P, Config),
ok.
+rel_oids(Config) ->
+ M = 'REL-OIDS',
+ P = {M,<<"REL-OIDS DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
+ "legal-oid OBJECT IDENTIFIER ::= {1 2}\n"
+ "legal-roid RELATIVE-OID ::= {1 2}\n"
+ "CONTAINER ::= CLASS { &oid OBJECT IDENTIFIER OPTIONAL,\n"
+ " &int INTEGER OPTIONAL,\n"
+ " &seq SEQUENCE { a INTEGER } OPTIONAL\n"
+ "}\n"
+ "object-1 CONTAINER ::= { &oid {1 2 3},\n"
+ " &int 42,\n",
+ " &seq {a 42}\n"
+ " }\n"
+
+ "wrong-type-rel-oid-1 RELATIVE-OID ::= legal-oid\n"
+ "wrong-type-rel-oid-2 RELATIVE-OID ::= object-1.&oid\n"
+ "wrong-type-rel-oid-3 RELATIVE-OID ::= object-1.&int\n"
+ "wrong-type-rel-oid-4 RELATIVE-OID ::= object-1.&seq\n"
+ "wrong-type-rel-oid-5 RELATIVE-OID ::= object-1.&undef\n"
+
+ "oid-bad-first OBJECT IDENTIFIER ::= {legal-roid 3}\n"
+ "END\n">>},
+ {error,
+ [
+ {structured_error,{M,12},asn1ct_check,{illegal_oid,rel_oid}},
+ {structured_error,{M,13},asn1ct_check,{illegal_oid,rel_oid}},
+ {structured_error,{M,14},asn1ct_check,{illegal_oid,rel_oid}},
+ {structured_error,{M,15},asn1ct_check,{illegal_oid,rel_oid}},
+ {structured_error,{M,16},asn1ct_check,{undefined_field,undef}},
+ {structured_error,{M,17},asn1ct_check,{illegal_oid,o_id}}
+ ]
+ } = run(P, Config),
+ ok.
+
+
parameterization(Config) ->
M = 'Parameterization',
P = {M,
<<"Parameterization DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
" NotUppercase{lowercase} ::= INTEGER (lowercase)\n"
+
+ " P{T1,T2} ::= SEQUENCE { a T1, b T2 }\n"
+ " S ::= P{OCTET STRING}\n"
+
+ " Seq ::= SEQUENCE { a INTEGER }\n"
+ " Sbad ::= Seq{INTEGER}\n"
+
+ "END\n">>},
+ {error,
+ [{structured_error,{M,2},asn1ct_check,
+ {illegal_typereference,lowercase}},
+ {structured_error,{M,4},asn1ct_check,
+ param_wrong_number_of_arguments},
+ {structured_error,{M,6},asn1ct_check,
+ {param_bad_type, 'Seq'}}
+ ]
+ } = run(P, Config),
+ ok.
+
+
+constructed(Config) ->
+ M = 'Const',
+ P = {M,
+ <<"Const DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
+ " Seq1 ::= SEQUENCE {a INTEGER, b BIT STRING, a BOOLEAN}\n"
+ " Ch ::= CHOICE {a INTEGER, b BIT STRING, a BOOLEAN}\n"
+ " Seq2 ::= SEQUENCE {COMPONENTS OF Ch}\n"
+ " CL ::= CLASS { &id INTEGER UNIQUE, &Type }\n"
+ " Seq3 ::= SEQUENCE { id CL.&id, d CL.&foo }\n"
+
+ " Seq4 ::= SEQUENCE { a INTEGER, z INTEGER OPTIONAL, b Set1 }\n"
+ " Set1 ::= SET { c BOOLEAN, d INTEGER }\n"
+ " s1 Seq4 ::= {a 42, b {c TRUE, zz 4711}}\n"
+ " s2 Seq4 ::= {a 42, b {c TRUE, d FALSE}}\n"
+ " s3 Seq4 ::= {a 42, b {c TRUE}}\n"
+ " s4 Seq4 ::= {a 42, b {c TRUE, d 4711}, zz 4712}\n"
+ " s5 Seq4 ::= {a 42}\n"
+ " s6 Seq4 ::= {a 42, zz 4712, b {c TRUE, d 4711}}\n"
"END\n">>},
{error,
- [{structured_error,{'Parameterization',2},asn1ct_check,
- {illegal_typereference,lowercase}}
- ]
- } = run(P, Config),
+ [{structured_error,{M,2},asn1ct_check,{duplicate_identifier,a}},
+ {structured_error,{M,3},asn1ct_check,{duplicate_identifier,a}},
+ {structured_error,{M,4},asn1ct_check,{illegal_COMPONENTS_OF,'Ch'}},
+ {structured_error,{M,6},asn1ct_check,{illegal_object_field,foo}},
+
+ {structured_error,{M,9},asn1ct_check,{illegal_id,zz}},
+ {structured_error,{M,10},asn1ct_check,illegal_integer_value},
+ {structured_error,{M,11},asn1ct_check,{missing_id,d}},
+ {structured_error,{M,12},asn1ct_check,{illegal_id,zz}},
+ {structured_error,{M,13},asn1ct_check,{missing_id,b}},
+ {structured_error,{M,14},asn1ct_check,{illegal_id,zz}}
+ ]
+ } = run(P, Config),
+ ok.
+
+syntax(Config) ->
+ M = 'Syntax',
+ P = {M,
+ <<"Syntax DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
+ " obj1 CL ::= { WRONG }\n"
+ " obj2 CL ::= { CODE 42 AGAIN WRONG }\n"
+ " obj3 CL ::= { INTEGER }\n"
+ " obj4 CL ::= { BIT STRING }\n"
+ " obj5 CL ::= { , }\n"
+ " obj6 CL ::= { CODE , }\n"
+ " obj7 CL ::= { CODE \"abc\" }\n"
+ " obj8 CL ::= { CODE }\n"
+ " obj9 CL ::= { CODE 42 ENUM}\n"
+ " obj10 CL ::= { CODE 42 ENUM BIT STRING}\n"
+
+ " obj11 CL ::= { CODE 42 TYPE 13}\n"
+ " obj12 CL ::= { CODE 42 TYPE d}\n"
+ " obj13 CL ::= { CODE 42 TYPE bs-value}\n"
+
+ " bad-syntax-1 BAD-SYNTAX-1 ::= { BAD 42 }\n"
+
+ " obj14 CL ::= { CODE 42 OBJ-SET integer }\n"
+ " obj15 CL ::= { CODE 42 OBJ-SET { A B } }\n"
+ " obj16 CL ::= { CODE 42 OBJ-SET SEQUENCE { an INTEGER } }\n"
+
+ " obj17 CL ::= { CODE 42 OID {seqtag 42} }\n"
+ " obj18 CL ::= { CODE 42 OID {seqtag 42, seqtag-again 43} }\n"
+ " obj19 CL ::= { CODE 42 OID {one 1 two 2} }\n"
+
+ " BAD-SYNTAX-1 ::= CLASS {\n"
+ " &code INTEGER UNIQUE\n"
+ " } WITH SYNTAX {\n"
+ " BAD &bad\n"
+ " }\n"
+
+ " BAD-SYNTAX-2 ::= CLASS {\n"
+ " &code INTEGER UNIQUE\n"
+ " } WITH SYNTAX {\n"
+ " BAD &Bad\n"
+ " }\n"
+
+ " BAD-SYNTAX-3 ::= CLASS {\n"
+ " &code INTEGER UNIQUE\n"
+ " } WITH SYNTAX {\n"
+ " [ID &code]\n"
+ " }\n"
+
+ " BAD-SYNTAX-4 ::= CLASS {\n"
+ " &code INTEGER UNIQUE\n"
+ " } WITH SYNTAX {\n"
+ " ID\n"
+ " }\n"
+
+ " BAD-SYNTAX-5 ::= CLASS {\n"
+ " &code INTEGER UNIQUE,\n"
+ " &Type\n"
+ " } WITH SYNTAX {\n"
+ " ID\n"
+ " }\n"
+
+ " BAD-SYNTAX-6 ::= CLASS {\n"
+ " &code INTEGER UNIQUE\n"
+ " } WITH SYNTAX {\n"
+ " ID &code, &code\n"
+ " }\n"
+
+ " BAD-SYNTAX-7 ::= CLASS {\n"
+ " &code INTEGER UNIQUE,\n"
+ " &Type\n"
+ " } WITH SYNTAX {\n"
+ " ID &Type, &code, &code, &Type\n"
+ " }\n"
+
+ " CL ::= CLASS {\n"
+ " &code INTEGER UNIQUE,\n"
+ " &enum ENUMERATED { a, b, c} OPTIONAL,\n"
+ " &Type OPTIONAL,\n"
+ " &ObjSet CL OPTIONAL,\n"
+ " &oid OBJECT IDENTIFIER OPTIONAL\n"
+ " } WITH SYNTAX {\n"
+ " CODE &code [ENUM &enum] [TYPE &Type] [OBJ-SET &ObjSet]\n"
+ " [OID &oid]\n"
+ " }\n"
+
+ " bs-value BIT STRING ::= '1011'B\n"
+
+ " integer INTEGER ::= 42\n"
+ "END\n">>},
+ {error,
+ [
+ {structured_error,{M,2},asn1ct_check,
+ {syntax_nomatch,"WRONG"}},
+ {structured_error,{M,3},asn1ct_check,
+ {syntax_nomatch,"AGAIN"}},
+ {structured_error,{M,4},asn1ct_check,
+ {syntax_nomatch,"INTEGER"}},
+ {structured_error,{M,5},asn1ct_check,
+ {syntax_nomatch,"BIT STRING"}},
+ {structured_error,{M,6},asn1ct_check,
+ {syntax_nomatch,"\",\""}},
+ {structured_error,{M,7},asn1ct_check,
+ {syntax_nomatch,"\",\""}},
+ {structured_error,{M,8},asn1ct_check,
+ {syntax_nomatch,"\"abc\""}},
+ {structured_error,{M,9},asn1ct_check,
+ syntax_nomatch},
+ {structured_error,{M,10},asn1ct_check,
+ syntax_nomatch},
+ {structured_error,{M,11},asn1ct_check,
+ {syntax_nomatch,"BIT STRING"}},
+ {structured_error,{M,12},asn1ct_check,
+ {syntax_nomatch,"13"}},
+ {structured_error,{M,13},asn1ct_check,
+ {syntax_nomatch,"d"}},
+ {structured_error,{M,14},asn1ct_check,
+ {syntax_nomatch,"bs-value"}},
+ {structured_error,{M,15},asn1ct_check,
+ {syntax_undefined_field,bad}},
+ {structured_error,{M,16},asn1ct_check,
+ {syntax_nomatch,"integer"}},
+ {structured_error,{M,17},asn1ct_check,
+ {syntax_nomatch,"\"A B\""}},
+ {structured_error,{M,18},asn1ct_check,
+ {syntax_nomatch,"SEQUENCE"}},
+ {structured_error,{M,19},asn1ct_check,
+ {syntax_nomatch,"\"seqtag 42\""}},
+ {structured_error,{M,20},asn1ct_check,
+ {syntax_nomatch,"\"seqtag 42 seqtag-again 43\""}},
+ {structured_error,{M,21},asn1ct_check,
+ {syntax_nomatch,"\"one 1 two 2\""}},
+ {structured_error,{M,22},asn1ct_check,
+ {syntax_undefined_field,bad}},
+ {structured_error,{M,27},asn1ct_check,
+ {syntax_undefined_field,'Bad'}},
+ {structured_error,{M,32},asn1ct_check,
+ {syntax_mandatory_in_optional_group,code}},
+ {structured_error,{M,37},asn1ct_check,
+ {syntax_missing_mandatory_fields,[code]}},
+ {structured_error,{M,42},asn1ct_check,
+ {syntax_missing_mandatory_fields,['Type',code]}},
+ {structured_error,{M,48},asn1ct_check,
+ {syntax_duplicated_fields,[code]}},
+ {structured_error,{M,53},asn1ct_check,
+ {syntax_duplicated_fields,['Type',code]}}
+ ]
+ } = run(P, Config),
+ ok.
+
+table_constraints(Config) ->
+ M = 'TableConstraints',
+ P = {M,
+ <<"TableConstraints DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
+ " Seq-1 ::= SEQUENCE {\n"
+ " contentType CONTENTS.&id,\n"
+ " content CONTENTS.&Type({Contents}{@contentType})\n"
+ " }\n"
+
+ " Seq-2 ::= SEQUENCE {\n"
+ " contentType INTEGER,\n"
+ " content CONTENTS.&Type({Contents}{@contentType})\n"
+ " }\n"
+
+ " Int ::= INTEGER ({1})\n"
+
+ " Seq-3 ::= SEQUENCE {\n"
+ " contentType CONTENTS.&id({1})\n"
+ " }\n"
+
+ "Contents CONTENTS ::= {\n"
+ " {OCTET STRING IDENTIFIED BY {2 1 1}}\n"
+ "}\n"
+
+ "CONTENTS ::= TYPE-IDENTIFIER\n"
+ "END\n">>},
+ {error,
+ [{structured_error,
+ {M,2},asn1ct_check,
+ {missing_table_constraint,contentType}},
+ {structured_error,
+ {M,6},asn1ct_check,
+ {missing_ocft,contentType}},
+ {structured_error,
+ {M,10},asn1ct_check,
+ illegal_table_constraint},
+ {structured_error,
+ {M,11},asn1ct_check,
+ invalid_table_constraint}
+ ]} = run(P, Config),
+ ok.
+
+tags(Config) ->
+ M = 'Tags',
+ P = {M,
+ <<"Tags DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
+ "SeqOpt1 ::= SEQUENCE\n"
+ "{\n"
+ "bool1 BOOLEAN OPTIONAL,\n"
+ "int1 INTEGER,\n"
+ "seq1 SeqIn OPTIONAL\n"
+ "}\n"
+
+ "SeqOpt1Imp ::= SEQUENCE \n"
+ "{\n"
+ "bool1 [1] BOOLEAN OPTIONAL,\n"
+ "int1 INTEGER,\n"
+ "seq1 [2] SeqIn OPTIONAL,\n"
+ "seq2 [2] SeqIn OPTIONAL,\n"
+ "...,\n"
+ "int2 [3] SeqIn,\n"
+ "int3 [3] SeqIn\n"
+ "}\n"
+
+ "SeqIn ::= SEQUENCE \n"
+ "{\n"
+ "boolIn BOOLEAN,\n"
+ "intIn INTEGER\n"
+ "}\n"
+ "\n"
+
+
+ "Set1 ::= SET {\n"
+ " os [0] OCTET STRING,\n"
+ " bool [0] BOOLEAN\n"
+ "}\n"
+
+ "Seq1 ::= SEQUENCE {\n"
+ "a [0] IMPLICIT Choice OPTIONAL\n"
+ "}\n"
+ "Seq2 ::= SEQUENCE {\n"
+ "a [0] IMPLICIT ANY OPTIONAL\n"
+ "}\n"
+ "Choice ::=\n"
+ "CHOICE {\n"
+ "a [0] BOOLEAN,\n"
+ "b [1] INTEGER\n"
+ "}\n"
+
+ "END\n">>},
+ {error,
+ [{structured_error,
+ {M,8},asn1ct_check,
+ {duplicate_tags,[seq1,seq2]}},
+ {structured_error,
+ {M,24},asn1ct_check,
+ {duplicate_tags,[bool,os]}},
+ {structured_error,
+ {M,28},asn1ct_check,
+ {implicit_tag_before,choice}},
+ {structured_error,
+ {M,31},asn1ct_check,
+ {implicit_tag_before,open_type}}
+ ]} = run(P, Config),
ok.
+
values(Config) ->
M = 'Values',
P = {M,
@@ -241,6 +813,53 @@ values(Config) ->
" os1 OCTET STRING ::= \"abc\"\n"
" os2 OCTET STRING ::= 42\n"
" os3 OCTET STRING ::= { 1, 3 }\n"
+ " os4 OCTET STRING ::= '1234'H\n"
+ " Seq ::= SEQUENCE {\n"
+ " an OCTET STRING\n"
+ " }\n"
+ " seq Seq ::= { an int }\n"
+ " os5 OCTET STRING ::= holder-1.&str\n"
+ " os6 OCTET STRING ::= int\n"
+
+ " int1 INTEGER ::= \"string\"\n"
+ " int2 INTEGER ::= os4\n"
+ " int3 INTEGER ::= not-defined\n"
+ " int4 INTEGER ::= holder-1.&str\n"
+ " int5 INTEGER ::= holder-2.&obj\n"
+ " int6 INTEGER ::= holder-2.&undefined-field\n"
+ " int7 INTEGER ::= holder-2.&UndefinedField.&id\n"
+
+ " bs1 BIT STRING ::= 42\n"
+ " bs2 BIT STRING ::= {a,b}\n"
+ " bs3 BIT STRING {a(0),z(25)} ::= {a,b}\n"
+ " bs4 BIT STRING {a(0),z(25)} ::= int\n"
+ " bs5 BIT STRING ::= holder-2.&str\n"
+ " bs6 BIT STRING ::= holder-2.&obj\n"
+
+ " b1 BOOLEAN ::= 42\n"
+ " b2 BOOLEAN ::= {a,b}\n"
+
+ " HOLDER ::= CLASS {\n"
+ " &str IA5String,\n"
+ " &obj HOLDER OPTIONAL\n"
+ " }\n"
+
+ " holder-1 HOLDER ::= { &str \"xyz\" }\n"
+ " holder-2 HOLDER ::= { &str \"xyz\", &obj holder-1 }\n"
+
+ " ext-1 EXTERNAL ::= {identification bad:{1 2 3}, data-value '123'H}\n"
+ " ext-2 EXTERNAL ::= {identification syntax:{1 2 3}, data '123'H}\n"
+
+ " CH ::= CHOICE { a INTEGER, b BOOLEAN }\n"
+ " ch1 CH ::= 2344\n"
+ " ch2 CH ::= zz:34\n"
+
+ " st1 an < Seq ::= 42\n"
+ " st2 zz < CH ::= 42\n"
+ " st3 a < HOLDER ::= 42\n"
+ " st4 a < INTEGER ::= 42\n"
+
+ " int INTEGER ::= 42\n"
"END\n">>},
{error,
[
@@ -249,7 +868,59 @@ values(Config) ->
{structured_error,{M,3},asn1ct_check,
illegal_octet_string_value},
{structured_error,{M,4},asn1ct_check,
- illegal_octet_string_value}
+ illegal_octet_string_value},
+ {structured_error,{M,9},asn1ct_check,
+ illegal_octet_string_value},
+ {structured_error,{M,10},asn1ct_check,
+ illegal_octet_string_value},
+ {structured_error,{M,11},asn1ct_check,
+ illegal_octet_string_value},
+ {structured_error,{M,12},asn1ct_check,
+ illegal_integer_value},
+ {structured_error,{M,13},asn1ct_check,
+ illegal_integer_value},
+ {structured_error,{M,14},asn1ct_check,
+ illegal_integer_value},
+ {structured_error,{M,15},asn1ct_check,
+ illegal_integer_value},
+ {structured_error,{M,16},asn1ct_check,
+ illegal_integer_value},
+ {structured_error,{M,17},asn1ct_check,
+ {undefined_field,'undefined-field'}},
+ {structured_error,{M,18},asn1ct_check,
+ {undefined_field,'UndefinedField'}},
+ {structured_error,{M,19},asn1ct_check,
+ {illegal_value, "BIT STRING"}},
+ {structured_error,{M,20},asn1ct_check,
+ {illegal_value, "BIT STRING"}},
+ {structured_error,{M,21},asn1ct_check,
+ {illegal_value, "BIT STRING"}},
+ {structured_error,{M,22},asn1ct_check,
+ {illegal_value, "BIT STRING"}},
+ {structured_error,{M,23},asn1ct_check,
+ {illegal_value, "BIT STRING"}},
+ {structured_error,{M,24},asn1ct_check,
+ {illegal_value, "BIT STRING"}},
+ {structured_error,{M,25},asn1ct_check,
+ {illegal_value, "BOOLEAN"}},
+ {structured_error,{M,26},asn1ct_check,
+ {illegal_value, "BOOLEAN"}},
+ {structured_error,{M,33},asn1ct_check,
+ illegal_external_value},
+ {structured_error,{M,34},asn1ct_check,
+ illegal_external_value},
+ {structured_error,{M,36},asn1ct_check,
+ {illegal_id, 2344}},
+ {structured_error,{M,37},asn1ct_check,
+ {illegal_id, zz}},
+ {structured_error,{M,38},asn1ct_check,
+ {illegal_choice_type, 'Seq'}},
+ {structured_error,{M,39},asn1ct_check,
+ {illegal_id, zz}},
+ {structured_error,{M,40},asn1ct_check,
+ {illegal_choice_type, 'HOLDER'}},
+ {structured_error,{M,41},asn1ct_check,
+ {illegal_choice_type, 'INTEGER'}}
]
} = run(P, Config),
ok.
@@ -258,5 +929,7 @@ values(Config) ->
run({Mod,Spec}, Config) ->
Base = atom_to_list(Mod) ++ ".asn1",
File = filename:join(?config(priv_dir, Config), Base),
+ Include0 = filename:dirname(?config(data_dir, Config)),
+ Include = filename:join(filename:dirname(Include0), "asn1_SUITE_data"),
ok = file:write_file(File, Spec),
- asn1ct:compile(File).
+ asn1ct:compile(File, [{i, Include}]).
diff --git a/lib/asn1/test/syntax_SUITE.erl b/lib/asn1/test/syntax_SUITE.erl
new file mode 100644
index 0000000000..1a2c938fe5
--- /dev/null
+++ b/lib/asn1/test/syntax_SUITE.erl
@@ -0,0 +1,340 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2014. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+-module(syntax_SUITE).
+-export([suite/0,all/0,groups/0,
+ assignment/1,
+ class/1,
+ constraints/1,
+ exports/1,
+ header/1,
+ imports/1,
+ objects/1,
+ sequence/1,
+ syntax/1,
+ tokenizer/1,
+ types/1,
+ values/1]).
+
+-include_lib("test_server/include/test_server.hrl").
+
+suite() -> [{ct_hooks, [ts_install_cth]}].
+
+all() ->
+ [{group,p}].
+
+groups() ->
+ [{p,parallel(),
+ [assignment,
+ class,
+ constraints,
+ exports,
+ header,
+ imports,
+ objects,
+ sequence,
+ syntax,
+ tokenizer,
+ types,
+ values]}].
+
+parallel() ->
+ case erlang:system_info(schedulers) > 1 of
+ true -> [parallel];
+ false -> []
+ end.
+
+assignment(Config) ->
+ Head = "Assignment DEFINITIONS AUTOMATIC TAGS ::=\nBEGIN\n",
+ End = "\nEND\n",
+ L0 = [{"42",3,{syntax_error,42}},
+ {"i",4,{syntax_error,'END'}},
+ {"i ::=",3,{syntax_error,'::='}},
+ {"i type",4,{syntax_error,'END'}},
+ {"i type ::=",3,{syntax_error,'::='}},
+ {"i TYPE",4,{syntax_error,'END'}},
+ {"i TYPE ::= ",4,{syntax_error,'END'}},
+ {"i INTEGER ::= 42 garbage",4,{syntax_error,'END'}},
+ {"i{T} Type",4,{syntax_error,'END'}},
+ {"TYPE",4,{syntax_error,'END'}},
+ {"TYPE ::=",4,{syntax_error,'END'}},
+ {"TYPE{ ::=",3,{syntax_error,'::='}},
+ {"TYPE{P, ::=",3,{syntax_error,'::='}},
+ {"TYPE{P,} ::=",3,{syntax_error,'}'}},
+ {"TYPE{Gov:} ::=",3,{syntax_error,':'}},
+ {"TYPE{A} CL ",4,{syntax_error,'END'}},
+ {"ObjSet CL",4,{syntax_error,'END'}}
+ ],
+ L = [{Head++S++End,Line,E} || {S,Line,E} <- L0],
+ run(L, "Assignment", Config),
+ ok.
+
+class(Config) ->
+ Head = "Class DEFINITIONS AUTOMATIC TAGS ::=\n"
+ "BEGIN\n"
+ " CL ::= CLASS {",
+ End = "\nEND\n",
+ L0 = [{"id",3,{syntax_error,'id'}},
+ {"&id INTEGER",4,{syntax_error,'END'}},
+ {"&id INTEGER,",4,{syntax_error,'END'}},
+ {"&id,",3,{syntax_error,','}},
+ {"&id OPTIONAL",3,{syntax_error,'OPTIONAL'}},
+ {"&id INTEGER OPTIONAL",4,{syntax_error,'END'}},
+ {"&var &Field",4,{syntax_error,'END'}},
+ {"&Type,",4,{syntax_error,'END'}},
+ {"&Type OPTIONAL",4,{syntax_error,'END'}},
+ {"&ValueSet INTEGER OPTIONAL",4,{syntax_error,'END'}},
+ {"&ValueSet INTEGER DEFAULT",4,{syntax_error,'END'}},
+ {"&ValueSet INTEGER DEFAULT {",4,{syntax_error,'END'}},
+ {"&ValueSet INTEGER DEFAULT {a",4,{syntax_error,'END'}},
+ {"&Var &Field",4,{syntax_error,'END'}}
+ ],
+ L = [{Head++S++End,Line,E} || {S,Line,E} <- L0],
+ run(L, "Class", Config),
+ ok.
+
+constraints(Config) ->
+ Head = "Constraints DEFINITIONS AUTOMATIC TAGS ::=\n"
+ "BEGIN\n"
+ " Type ::= ",
+ End = "\nEND\n",
+ L0 = [{"INTEGER (",4,{syntax_error,'END'}},
+ {"INTEGER (10x",3,{syntax_error,x}},
+ {"INTEGER (10|(10y",3,{syntax_error,y}},
+ {"INTEGER (CONSTRAINED BY {}",4,{syntax_error,'END'}},
+ {"INTEGER (CONSTRAINED BY {INTEGER garbage",3,
+ {syntax_error,garbage}},
+ {"INTEGER ({ObjSet",4,{syntax_error,'END'}},
+ {"INTEGER ({ObjSet}{",3,{syntax_error,'{'}},
+ {"INTEGER ({ObjSet}{@",3,{syntax_error,'{'}},
+ {"INTEGER ({ObjSet}{@x",3,{syntax_error,'{'}},
+ {"INTEGER ({ObjSet}{@x}",4,{syntax_error,'END'}},
+ {"INTEGER (10 !BOOLEAN",4,{syntax_error,'END'}},
+ {"INTEGER (10 !BOOLEAN:",4,{syntax_error,'END'}},
+ {"INTEGER (10 !BOOLEAN:FALSE",4,{syntax_error,'END'}},
+ {"SEQUENCE {} (WITH COMPONENTS { Type })",
+ 3,{syntax_error,'Type'}},
+ {"SEQUENCE {} (WITH COMPONENTS { x (10)",
+ 4,{syntax_error,'END'}},
+ {"SEQUENCE {} (WITH COMPONENTS { ..., x (10)",
+ 4,{syntax_error,'END'}}
+ ],
+ L = [{Head++S++End,Line,E} || {S,Line,E} <- L0],
+ run(L, "Constraints", Config),
+ ok.
+
+exports(Config) ->
+ Head = "Exports DEFINITIONS AUTOMATIC TAGS ::=\n"
+ "BEGIN\n"
+ " EXPORTS ",
+ End = "\nEND\n",
+ L0 = [{"Type",4,{syntax_error,'END'}}
+ ],
+ L = [{Head++S++End,Line,E} || {S,Line,E} <- L0],
+ run(L, "Exports", Config),
+ ok.
+
+header(Config) ->
+ L = [{"lowercase",1,{syntax_error,lowercase}},
+ {"H ",2,{syntax_error,'END-OF-FILE'}},
+ {"H-",1,{syntax_error,'-'}},
+ {"42",1,{syntax_error,42}},
+ {"H definitions",1,{syntax_error,definitions}},
+ {"H DEFINITIONS STUPID TAGS",1,{syntax_error,'STUPID'}},
+ {"H DEFINITIONS WHATEVER",1,{syntax_error,'WHATEVER'}},
+ {"H DEFINITIONS ::= BEGIN",2,{syntax_error,'END-OF-FILE'}},
+ {"BOOLEAN",1,{syntax_error,'BOOLEAN'}}
+ ],
+ run(L, "H", Config),
+ ok.
+
+imports(Config) ->
+ Head = "Imports DEFINITIONS AUTOMATIC TAGS ::=\n"
+ "BEGIN\n"
+ " IMPORTS ",
+ End = "\nEND\n",
+ L0 = [{"Type FROM X",4,{syntax_error,'END'}},
+ {"Symbols TO Y",3,{syntax_error,'TO'}}
+ ],
+ L = [{Head++S++End,Line,E} || {S,Line,E} <- L0],
+ run(L, "Imports", Config),
+ ok.
+
+objects(Config) ->
+ Head = "Objects DEFINITIONS AUTOMATIC TAGS ::=\n"
+ "BEGIN\n"
+ " object CLASS-NAME ::= ",
+ End = "\nEND\n",
+ L0 = [{"{",4,{syntax_error,'END'}},
+ {"{&min 1, max 10}",3,{syntax_error,max}},
+ {"{&min 1, Max 10}",3,{syntax_error,'Max'}},
+ {"{min 1, &max 10}",3,{syntax_error,'&max'}},
+ {"{min 1, &Max 10}",3,{syntax_error,'&Max'}},
+ {"{RESERVERD WORD BIT}",3,{syntax_error,'BIT'}},
+ {"{&min 1",4,{syntax_error,'END'}}
+ ],
+ L = [{Head++S++End,Line,E} || {S,Line,E} <- L0],
+ run(L, "Objects", Config),
+ ok.
+
+sequence(Config) ->
+ Head = "Sequence DEFINITIONS AUTOMATIC TAGS ::=\n"
+ "BEGIN\n"
+ " Type ::= SEQUENCE {",
+ End = "\nEND\n",
+ L0 = [{"",4,{syntax_error,'END'}},
+ {" UpperCase",3,{syntax_error,'UpperCase'}},
+ {" a b",4,{syntax_error,'END'}},
+ {" i INTEGER",4,{syntax_error,'END'}},
+ {" ...",4,{syntax_error,'END'}},
+ {" ..., [[",4,{syntax_error,'END'}},
+ {" ..., [[ a INTEGER ]",3,{syntax_error,']'}},
+ {" ..., [[ a INTEGER,",3,{syntax_error,','}},
+ {" ..., [[ a INTEGER, ... ]]",3,{syntax_error,','}},
+ {" ... !42 xxx",3,{syntax_error,'xxx'}},
+ {" ... !42, a INTEGER,",3,{syntax_error,','}}
+ ],
+ L = [{Head++S++End,Line,E} || {S,Line,E} <- L0],
+ run(L, "Sequence", Config),
+ ok.
+
+syntax(Config) ->
+ Head = "Syntax DEFINITIONS AUTOMATIC TAGS ::=\n"
+ "BEGIN\n"
+ " CL ::= CLASS { &id INTEGER UNIQUE } WITH SYNTAX ",
+ End = "\nEND\n",
+ L0 = [{"{}",3,{syntax_error,'}'}},
+ {"WORD",3,{syntax_error,'WORD'}},
+ {"{ Word }",3,{syntax_error,'Word'}},
+ {"{ [ Word ] }",3,{syntax_error,'Word'}},
+ {"{ [ WORD }",3,{syntax_error,'}'}},
+ {"{ WORD;",3,{syntax_error,';'}}
+ ],
+ L = [{Head++S++End,Line,E} || {S,Line,E} <- L0],
+ run(L, "Syntax", Config),
+ ok.
+
+tokenizer(Config) ->
+ Head = "Tokenize DEFINITIONS AUTOMATIC TAGS ::=\n"
+ "BEGIN\n",
+ End = "\nEND\n",
+ L0 = [{"'",3,eol_in_token},
+ {"'42'B",3,{invalid_binary_number,"42"}},
+ {"'ZZZ'H",3,{invalid_hex_number,"ZZZ"}},
+ {"\"abc",3,missing_quote_at_eof},
+ {"/*",3,eof_in_comment}
+ ],
+ L = [{Head++S++End,Line,E} || {S,Line,E} <- L0],
+ run(L, "Tokenizer", Config, asn1ct_tok),
+ ok.
+
+types(Config) ->
+ Head = "Types DEFINITIONS AUTOMATIC TAGS ::=\n"
+ "BEGIN\n"
+ " Type ::= ",
+ End = "\nEND\n",
+ L0 = [{"BIT STRING garbage",4,{syntax_error,'END'}},
+ {"BIT STRING {",4,{syntax_error,'END'}},
+ {"BIT STRING { a(42",3,{syntax_error,42}},
+ {"BIT STRING { a(0)",4,{syntax_error,'END'}},
+ {"CHOICE {",4,{syntax_error,'END'}},
+ {"CHOICE { ..., a}",3,{syntax_error,'...'}},
+ {"CHOICE { UpperCase",3,{syntax_error,'UpperCase'}},
+ {"CHOICE { i INTEGER",4,{syntax_error,'END'}},
+ {"CHOICE { ..., i INTEGER }",3,{syntax_error,'...'}},
+ {"CHOICE { b BOOLEAN, ..., i INTEGER",
+ 4,{syntax_error,'END'}},
+ {"CHOICE { b BOOLEAN, ..., [[ e BOOLEAN, ...]]}",
+ 3,{syntax_error,','}},
+ {"CHOICE { b BOOLEAN, ..., i INTEGER, ..., x BIT STRING}",
+ 3,{syntax_error,','}},
+ {"ENUMERATED {",4,{syntax_error,'END'}},
+ {"ENUMERATED { 42 }",3,{syntax_error,42}},
+ {"ENUMERATED { a, b",4,{syntax_error,'END'}},
+ {"ENUMERATED { a, }",3,{syntax_error,','}},
+ {"ENUMERATED { a, ...,\nb, ..., c }",4,{syntax_error,','}},
+ {"INTEGER {",4,{syntax_error,'END'}},
+ {"INTEGER { a(42)",4,{syntax_error,'END'}},
+ {"SEQUENCE",3,{syntax_error,'SEQUENCE'}},
+ %% More tests for SEQUENCE in sequence/1.
+ {"SEQUENCE SIZE (1..10)",4,{syntax_error,'END'}},
+ {"SEQUENCE (SIZE (1..10))",4,{syntax_error,'END'}},
+ {"SET { i INTEGER",4,{syntax_error,'END'}},
+ {"SET { ...",4,{syntax_error,'END'}},
+ {"SET SIZE (1..10)",4,{syntax_error,'END'}},
+ {"SET (SIZE (1..10))",4,{syntax_error,'END'}},
+ {"SET { ... !42 xxx",3,{syntax_error,'xxx'}},
+ {"SET { ... !42, a INTEGER,",3,{syntax_error,','}},
+ {"[",4,{syntax_error,'END'}},
+ {"[42",4,{syntax_error,'END'}}
+ ],
+ L = [{Head++S++End,Line,E} || {S,Line,E} <- L0],
+ run(L, "Types", Config),
+ ok.
+
+values(Config) ->
+ Head = "Values DEFINITIONS AUTOMATIC TAGS ::=\n"
+ "BEGIN\n"
+ " value Type ::= ",
+ End = "\nEND\n",
+ L0 = [{"",4,{syntax_error,'END'}}
+ ],
+ L = [{Head++S++End,Line,E} || {S,Line,E} <- L0],
+ run(L, "Values", Config),
+ ok.
+
+run(List, File, Config) ->
+ run(List, File, Config, asn1ct_parser2).
+
+run(List, File0, Config, Module) ->
+ Base = File0 ++ ".asn1",
+ File = filename:join(?config(priv_dir, Config), Base),
+ case run_1(List, Base, File, Module, 0) of
+ 0 -> ok;
+ Errors -> ?t:fail(Errors)
+ end.
+
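+%% run_1(TestCases, Base, File, Module, FailCount)
+%%  Compile each source snippet and verify that the compiler reports
+%%  the expected error term on the expected line; return the number
+%%  of test cases that did not behave as expected.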
+run_1([{Source,Line,Error}=Exp|T], Base, File, Module, N) ->
+ ok = file:write_file(File, Source),
+ io:format("~s", [Source]),
+ case asn1ct:compile(File) of
+ {error,[{structured_error,{Base,L},Module,E}]} ->
+ case {L,E} of
+ {Line,Error} ->
+ run_1(T, Base, File, Module, N);
+ {Line,OtherError} ->
+ io:format("*** Wrong error: ~p, expected ~p ***\n",
+ [OtherError,Error]),
+ run_1(T, Base, File, Module, N+1);
+ {OtherLine,Error} ->
+ io:format("*** Wrong line: ~p, expected ~p ***\n",
+ [OtherLine,Line]),
+ run_1(T, Base, File, Module, N+1);
+ {_,_} ->
+ io:format("*** Wrong line: ~p, expected ~p ***",
+ [L,Line]),
+ io:format("*** Wrong error: ~p, expected ~p ***\n",
+ [E,Error]),
+ run_1(T, Base, File, Module, N+1)
+ end;
+ Other ->
+ io:format("~p\nGOT: ~p\n", [Exp,Other]),
+ run_1(T, Base, File, Module, N+1)
+ end;
+run_1([], _, _, _, N) ->
+ N.
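
For reference, each tuple in the lists above is {Asn1Source, ExpectedLine, ExpectedError}: run/3,4 writes the source to <Name>.asn1, compiles it, and requires exactly one matching structured_error from the expected module. A hypothetical extra group reusing the same helpers might look like the sketch below; the group name and the expected error tuple are assumptions modeled on the existing "ENUMERATED {" case in types/1.

%% Hypothetical example group; not part of this patch.
enumerated_errors(Config) ->
    Head = "EnumeratedErrors DEFINITIONS AUTOMATIC TAGS ::=\n"
           "BEGIN\n",
    End = "\nEND\n",
    %% Error tuple assumed to mirror the "ENUMERATED {" entry in types/1.
    L0 = [{"E ::= ENUMERATED {",4,{syntax_error,'END'}}],
    L = [{Head++S++End,Line,E} || {S,Line,E} <- L0],
    run(L, "EnumeratedErrors", Config),
    ok.
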
diff --git a/lib/asn1/test/testChoExtension.erl b/lib/asn1/test/testChoExtension.erl
index 09e19ceebb..f36d6c1cbf 100644
--- a/lib/asn1/test/testChoExtension.erl
+++ b/lib/asn1/test/testChoExtension.erl
@@ -39,11 +39,6 @@ extension(_Rules) ->
roundtrip('ChoExt3', {int,33}),
roundtrip('ChoExt4', {str,<<"abc">>}),
- roundtrip('ChoEmptyRoot', {bool,false}),
- roundtrip('ChoEmptyRoot', {bool,true}),
- roundtrip('ChoEmptyRoot', {int,0}),
- roundtrip('ChoEmptyRoot', {int,7}),
-
ok.
diff --git a/lib/asn1/test/testConstraints.erl b/lib/asn1/test/testConstraints.erl
index 3ccf883bd6..5d65cb8d73 100644
--- a/lib/asn1/test/testConstraints.erl
+++ b/lib/asn1/test/testConstraints.erl
@@ -231,6 +231,28 @@ int_constraints(Rules) ->
seq_roundtrip(Rules, 'SeqOverlapping', 'SeqNonOverlapping', 19000),
seq_roundtrip(Rules, 'SeqOverlapping', 'SeqNonOverlapping', 26900),
+ %%==========================================================
+ %% Constraints from object fields.
+ %%==========================================================
+ range_error(Rules, 'IntObjectConstr', 1),
+ roundtrip('IntObjectConstr', 2),
+ roundtrip('IntObjectConstr', 3),
+ roundtrip('IntObjectConstr', 4),
+ range_error(Rules, 'IntObjectConstr', 5),
+
+
+ %%==========================================================
+ %% INTEGER constraints defined using named INTEGERs.
+ %%==========================================================
+ 42 = 'Constraints':'constrainedNamedInt-1'(),
+ 100 = 'Constraints':'constrainedNamedInt-2'(),
+ range_error(Rules, 'ConstrainedNamedInt', 41),
+ roundtrip('ConstrainedNamedInt', v1),
+ range_error(Rules, 'ConstrainedNamedInt', 43),
+
+ range_error(Rules, 'SeqWithNamedInt', {'SeqWithNamedInt',-100}),
+ roundtrip('SeqWithNamedInt', {'SeqWithNamedInt',v2}),
+
ok.
%% PER: Ensure that if the lower bound is Lb, Lb+16#80 is encoded
diff --git a/lib/asn1/test/testDoubleEllipses.erl b/lib/asn1/test/testDoubleEllipses.erl
index 4e8972cdfc..3caa166ae0 100644
--- a/lib/asn1/test/testDoubleEllipses.erl
+++ b/lib/asn1/test/testDoubleEllipses.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -24,17 +24,20 @@
-include_lib("test_server/include/test_server.hrl").
-record('Seq',{a, c}).
+-record('SeqV1',{a, b}).
-record('SeqV2',{a, b ,c}).
-record('SeqAlt',{a,d,b,e,c,f,g}).
-record('SeqAltV2',{a,d,b,e,h,i,c,f,g}).
-record('Set',{a, c}).
+-record('SetV1',{a, b}).
-record('SetV2',{a, b ,c}).
-record('SetAlt',{a,d,b,e,c,f,g}).
-record('SetAltV2',{a,d,b,e,h,i,c,f,g}).
main(_Rules) ->
roundtrip('Seq', #'Seq'{a=10,c=true}),
+ roundtrip('SeqV1', #'SeqV1'{a=10,b=false}),
roundtrip('SeqV2', #'SeqV2'{a=10,b=false,c=true}),
roundtrip('SeqAlt',
#'SeqAlt'{a=10,d=12,b = <<2#1010:4>>,
@@ -45,6 +48,7 @@ main(_Rules) ->
e=true,h="PS",i=13,c=false,f=14,g=16}),
roundtrip('Set', #'Set'{a=10,c=true}),
+ roundtrip('SetV1', #'SetV1'{a=10,b=false}),
roundtrip('SetV2', #'SetV2'{a=10,b=false,c=true}),
roundtrip('SetAlt',
#'SetAlt'{a=10,d=12,
diff --git a/lib/asn1/test/testEnumExt.erl b/lib/asn1/test/testEnumExt.erl
index 878518be11..29995d6340 100644
--- a/lib/asn1/test/testEnumExt.erl
+++ b/lib/asn1/test/testEnumExt.erl
@@ -78,6 +78,9 @@ common(Erule) ->
v_roundtrip(Erule, 'SeqBig', {'SeqBig',true,e40,9357}),
v_roundtrip(Erule, 'SeqBig', {'SeqBig',true,e80,9357}),
+
+ v_roundtrip(Erule, 'EnumSkip', d),
+
ok.
roundtrip(Type, Value) ->
@@ -85,11 +88,20 @@ roundtrip(Type, Value) ->
v_roundtrip(Erule, Type, Value) ->
Encoded = roundtrip(Type, Value),
- Encoded = asn1_test_lib:hex_to_bin(v(Erule, Value)).
-
-v(ber, {'SeqBig',true,e40,9357}) -> "300A8001 FF810141 8202248D";
-v(ber, {'SeqBig',true,e80,9357}) -> "300B8001 FF810200 81820224 8D";
-v(per, {'SeqBig',true,e40,9357}) -> "E0014002 248D";
-v(per, {'SeqBig',true,e80,9357}) -> "E0018002 248D";
-v(uper, {'SeqBig',true,e40,9357}) -> "E0280044 91A0";
-v(uper, {'SeqBig',true,e80,9357}) -> "E0300044 91A0".
+ Encoded = asn1_test_lib:hex_to_bin(v(Erule, Type, Value)).
+
+v(Erule, 'SeqBig', Value) ->
+ v_seq_big(Erule, Value);
+v(Erule, 'EnumSkip', Value) ->
+ v_enum_skip(Erule, Value).
+
+v_seq_big(ber, {'SeqBig',true,e40,9357}) -> "300A8001 FF810141 8202248D";
+v_seq_big(ber, {'SeqBig',true,e80,9357}) -> "300B8001 FF810200 81820224 8D";
+v_seq_big(per, {'SeqBig',true,e40,9357}) -> "E0014002 248D";
+v_seq_big(per, {'SeqBig',true,e80,9357}) -> "E0018002 248D";
+v_seq_big(uper, {'SeqBig',true,e40,9357}) -> "E0280044 91A0";
+v_seq_big(uper, {'SeqBig',true,e80,9357}) -> "E0300044 91A0".
+
+v_enum_skip(per, d) -> "82";
+v_enum_skip(uper, d) -> "82";
+v_enum_skip(ber, d) -> "0A0103".
diff --git a/lib/asn1/test/testExtensibilityImplied.erl b/lib/asn1/test/testExtensibilityImplied.erl
new file mode 100644
index 0000000000..8049bb6e53
--- /dev/null
+++ b/lib/asn1/test/testExtensibilityImplied.erl
@@ -0,0 +1,29 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2014. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%%
+
+-module(testExtensibilityImplied).
+-export([main/0]).
+
+main() ->
+ M = 'ExtensibilityImplied',
+ {'Seq2',true} = M:decode('Seq2', M:encode('Seq1', {'Seq1',true,42})),
+ {'Set2',true} = M:decode('Set2', M:encode('Set1', {'Set1',true,42})),
+ {asn1_enum,_} = M:decode('Enum2', M:encode('Enum1', ext)),
+ ok.
diff --git a/lib/asn1/test/testImporting.erl b/lib/asn1/test/testImporting.erl
new file mode 100644
index 0000000000..de8beae38b
--- /dev/null
+++ b/lib/asn1/test/testImporting.erl
@@ -0,0 +1,34 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2014. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%%
+
+-module(testImporting).
+-export([main/0]).
+
+main() ->
+ M = 'Importing',
+ roundtrip('Seq', {'Seq',5}),
+ roundtrip('OtherSeq', {'Seq',42,true}),
+ {'Seq',42,true} = M:seq(),
+ roundtrip('ObjSeq', {'ObjSeq',1,<<"XYZ">>}),
+ roundtrip('ObjSeq', {'ObjSeq',2,19}),
+ ok.
+
+roundtrip(Type, Value) ->
+ asn1_test_lib:roundtrip('Importing', Type, Value).
diff --git a/lib/asn1/test/testInfObj.erl b/lib/asn1/test/testInfObj.erl
index 37c134b1b9..3044d5cd2a 100644
--- a/lib/asn1/test/testInfObj.erl
+++ b/lib/asn1/test/testInfObj.erl
@@ -74,6 +74,12 @@ main(_Erule) ->
{'ConstructedPdu',7,[]}),
roundtrip('InfObj', 'ConstructedPdu',
{'ConstructedPdu',7,[64,1,19,17,35]}),
+ {'ConstructedPdu',8,[{_,-15,35},{_,533,-70}]} =
+ enc_dec('InfObj', 'ConstructedPdu',
+ {'ConstructedPdu',8,[{'_',-15,35},{'_',533,-70}]}),
+ {'ConstructedPdu',9,[{RecTag9,-15,35},{RecTag9,533,-70}]} =
+ enc_dec('InfObj', 'ConstructedPdu',
+ {'ConstructedPdu',9,[{'_',-15,35},{'_',533,-70}]}),
roundtrip('InfObj', 'ConstructedSet',
{'ConstructedSet',1,{'CONSTRUCTED-DEFAULT_Type',-2001,true}}),
@@ -96,6 +102,12 @@ main(_Erule) ->
{'ConstructedSet',7,[]}),
roundtrip('InfObj', 'ConstructedSet',
{'ConstructedSet',7,[64,1,19,17,35]}),
+ {'ConstructedSet',8,[{_,-15,35},{_,533,-70}]} =
+ enc_dec('InfObj', 'ConstructedSet',
+ {'ConstructedSet',8,[{'_',-15,35},{'_',533,-70}]}),
+ {'ConstructedSet',9,[{_,-15,35},{_,533,-70}]} =
+ enc_dec('InfObj', 'ConstructedSet',
+ {'ConstructedSet',9,[{'_',-15,35},{'_',533,-70}]}),
roundtrip('InfObj', 'Seq2',
{'Seq2',42,[true,false,false,true],
@@ -126,12 +138,37 @@ main(_Erule) ->
test_objset('OstSeq45', [4,5]),
test_objset('OstSeq12345', [1,2,3,4,5]),
+ test_objset('OstSeq12Except', [1,2]),
+ test_objset('OstSeq123Except', [1,2]),
+
test_objset('ExOstSeq12', [1,2]),
test_objset('ExOstSeq123', [1,2,3]),
- %%test_objset('ExOstSeq1234', [1,2,3,4]),
+ test_objset('ExOstSeq1234', [1,2,3,4]),
test_objset('ExOstSeq45', [4,5]),
test_objset('ExOstSeq12345', [1,2,3,4,5]),
+ test_objset('ExOstSeq12Except', [1,2]),
+ test_objset('ExOstSeq123Except', [1,2]),
+
+ roundtrip('InfObj', 'ExtClassSeq', {'ExtClassSeq', 4}),
+
+ {1,2,42} = 'InfObj':'value-1'(),
+ {1,2,42,25} = 'InfObj':'value-2'(),
+ {100,101} = 'InfObj':'value-3'(),
+ {1,2,100,101} = 'InfObj':'value-4'(),
+
+ roundtrip('InfObj', 'Rdn', {'Rdn',{2,5,4,41},"abc"}),
+
+ roundtrip('InfObj', 'TiAliasSeq',
+ {'TiAliasSeq',{'TiAliasSeq_prf',{2,1,2},'NULL'}}),
+
+ roundtrip('InfObj', 'ContentInfo',
+ {'ContentInfo',{2,7,8,9},"string"}),
+ {2,7,8,9} = 'InfObj':'id-content-type'(),
+
+ <<2#1011:4>> = 'InfObj':'tricky-bit-string'(),
+ <<16#CAFE:16>> = 'InfObj':'tricky-octet-string'(),
+
ok.
test_objset(Type, Keys) ->
diff --git a/lib/asn1/test/testInfObjExtract.erl b/lib/asn1/test/testInfObjExtract.erl
new file mode 100644
index 0000000000..0ef967c1f6
--- /dev/null
+++ b/lib/asn1/test/testInfObjExtract.erl
@@ -0,0 +1,72 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2014. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%%
+
+-module(testInfObjExtract).
+
+-export([main/0]).
+
+main() ->
+ roundtrip_data_object_13('DataSeq-1'),
+
+ roundtrip_data_object_1('DataSeq-2'),
+ roundtrip_data_object_1('DataSeq-3'),
+ roundtrip_data_object_1('DataSeq-4'),
+
+ roundtrip_data_object_13('DataSeq-5'),
+ roundtrip_data_object_13('DataSeq-6'),
+
+ roundtrip_data_object_1('DataSeqSingleSet-1'),
+ roundtrip_data_object_1('DataSeqSingleSet-2'),
+
+ roundtrip('ObjClassSeq-1', {'ObjClassSeq-1',1,true}),
+ roundtrip('ObjClassSeq-1', {'ObjClassSeq-1',2,true}),
+
+ roundtrip_error('ObjClassSeq-1', {'ObjClassSeq-1',0,false}),
+ roundtrip_error('ObjClassSeq-1', {'ObjClassSeq-1',3,true}),
+ roundtrip_error('ObjClassSeq-1', {'ObjClassSeq-1',4,false}),
+ roundtrip_error('ObjClassSeq-1', {'ObjClassSeq-1',5,true}),
+
+ ok.
+
+roundtrip_data_object_13(SeqType) ->
+ roundtrip(SeqType, {SeqType,1,true}),
+ roundtrip(SeqType, {SeqType,2,<<"abc">>}),
+ roundtrip(SeqType, {SeqType,3,<<42:5>>}),
+ roundtrip_error(SeqType, {SeqType,4,42}).
+
+roundtrip_data_object_1(SeqType) ->
+ roundtrip(SeqType, {SeqType,1,false}),
+ roundtrip(SeqType, {SeqType,1,true}),
+ roundtrip_error(SeqType, {SeqType,1,42}),
+ roundtrip_error(SeqType, {SeqType,2,<<"abc">>}),
+ roundtrip_error(SeqType, {SeqType,3,<<42:5>>}),
+ roundtrip_error(SeqType, {SeqType,999,42}).
+
+roundtrip(T, V) ->
+ asn1_test_lib:roundtrip('InfObjExtract', T, V).
+
+roundtrip_error(T, V) ->
+ try asn1_test_lib:roundtrip('InfObjExtract', T, V) of
+ ok ->
+ test_server:fail()
+ catch
+ _:_ ->
+ ok
+ end.
diff --git a/lib/asn1/test/testParamBasic.erl b/lib/asn1/test/testParamBasic.erl
index 39f7947e8d..5f6116bba4 100644
--- a/lib/asn1/test/testParamBasic.erl
+++ b/lib/asn1/test/testParamBasic.erl
@@ -46,6 +46,14 @@ main(Rules) ->
roundtrip('AnAlgorithm', {'AnAlgorithm',1,42}),
roundtrip('AnAlgorithm', {'AnAlgorithm',2,true}),
roundtrip('AnAlgorithm', {'AnAlgorithm',2,false}),
+ {'AnAlgorithm',1,42} = 'ParamBasic':'alg-seq-1'(),
+ {'AnAlgorithm',2,true} = 'ParamBasic':'alg-seq-2'(),
+
+ roundtrip('Seq', {'Seq',
+ {'Seq_c1',{2,1,1},42},
+ {'Seq_c2',{2,1,1,1},asn1_NOVALUE}}),
+
+ {_,{2,9,9,9,7},'NULL'} = 'ParamBasic':'algid-hmacWithSHA1'(),
ok.
roundtrip(Type, Value) ->
diff --git a/lib/asn1/test/testPrim.erl b/lib/asn1/test/testPrim.erl
index e07379e634..d7893a2d58 100644
--- a/lib/asn1/test/testPrim.erl
+++ b/lib/asn1/test/testPrim.erl
@@ -98,6 +98,11 @@ enum(Rules) ->
ber ->
ok
end,
+
+ roundtrip('NegEnumVal', neg),
+ roundtrip('NegEnumVal', zero),
+ roundtrip('EnumVal128', val),
+
ok.
diff --git a/lib/asn1/test/testRfcs.erl b/lib/asn1/test/testRfcs.erl
new file mode 100644
index 0000000000..6281d09873
--- /dev/null
+++ b/lib/asn1/test/testRfcs.erl
@@ -0,0 +1,75 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2014. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%%
+
+-module(testRfcs).
+
+-export([compile/3,test/0]).
+
+-include_lib("test_server/include/test_server.hrl").
+
+compile(Config, Erules, Options0) ->
+ Options = [no_ok_wrapper|Options0],
+ DataDir = ?config(data_dir, Config),
+ Specs0 = filelib:wildcard("*.asn1", filename:join(DataDir, rfcs)),
+ Specs = [filename:join(rfcs, Spec) || Spec <- Specs0],
+ 122 = length(Specs),
+ CaseDir = ?config(case_dir, Config),
+ asn1_test_lib:compile_all(Specs, Config, [Erules,{i,CaseDir}|Options]).
+
+test() ->
+ {1,3,6,1,5,5,7,48,1,2} =
+ IdPkixOcspNonce =
+ 'OCSP-2009':'id-pkix-ocsp-nonce'(),
+ roundtrip('OCSP-2009', 'OCSPRequest',
+ {'OCSPRequest',
+ {'TBSRequest',
+ 0,
+ {rfc822Name,"name string"},
+ [{'Request',
+ {'CertID',{'_',{2,9,3,4,5},asn1_NOVALUE},
+ <<"POTATOHASH">>,<<"HASHBROWN">>,42},
+ [{'_',IdPkixOcspNonce,true,<<34,159,16,57,199>>}]}],
+ asn1_NOVALUE},
+ asn1_NOVALUE}),
+ otp_7759(),
+ ok.
+
+roundtrip(Module, Type, Value0) ->
+ Enc = Module:encode(Type, Value0),
+ Value1 = Module:decode(Type, Enc),
+ asn1_test_lib:match_value(Value0, Value1),
+ ok.
+
+otp_7759() ->
+ %% The release note for asn1-1.6.6 says:
+ %% Decode of an open_type when the value was empty tagged
+ %% type encoded with indefinite length failed.
+ Mod = 'OLD-PKCS7',
+ Encoded = encoded_msg(),
+ ContentInfo = Mod:decode('ContentInfo', Encoded),
+ io:format("~p\n", [ContentInfo]),
+ {'ContentInfo',_Id,PKCS7_content} = ContentInfo,
+ X = Mod:decode('SignedData', PKCS7_content),
+ io:format("~p\n", [X]),
+ io:nl(),
+ ok.
+
+encoded_msg() ->
+ <<48,128,6,9,42,134,72,134,247,13,1,7,2,160,128,48,128,2,1,1,49,11,48,9,6,5,43,14,3,2,26,5,0,48,128,6,9,42,134,72,134,247,13,1,7,1,160,128,36,128,0,0,0,0,0,0, 49,130,1,192,48,130,1,188,2,1,1,48,50,48,38,49,17,48,15,6,3,85,4,3,12,8,65,100,109,105,110,67,65,49,49,17,48,15,6,3,85,4,10,12,8,69,82,73,67,83,83,79,78,2,8,15,151,245,186,21,23,240,96,48,9,6,5,43,14,3,2,26,5,0,160,129,229,48,17,6,10,96,134,72,1,134,248,69,1,9,2,49,3,19,1,51,48,17,6,10,96,134,72,1,134,248,69,1,9,3,49,3,19,1,51,48,24,6,9,42,134,72,134,247,13,1,9,3,49,11,6,9,42,134,72,134,247,13,1,7,1,48,28,6,9,42,134,72,134,247,13,1,9,5,49,15,23,13,48,56,49,50,49,48,48,57,53,52,50,51,90,48,28,6,10,96,134,72,1,134,248,69,1,9,7,49,14,19,12,49,53,50,56,49,52,50,52,48,57,53,53,48,32,6,10,96,134,72,1,134,248,69,1,9,5,49,18,4,16,165,115,177,71,78,88,239,113,78,56,98,98,18,202,217,235,48,32,6,10,96,134,72,1,134,248,69,1,9,6,49,18,4,16,227,174,230,251,43,153,252,65,11,93,231,83,34,18,55,46,48,35,6,9,42,134,72,134,247,13,1,9,4,49,22,4,20,218,57,163,238,94,107,75,13,50,85,191,239,149,96,24,144,175,216,7,9,48,13,6,9,42,134,72,134,247,13,1,1,1,5,0,4,129,128,106,233,116,125,140,51,133,173,63,41,54,138,214,211,89,215,169,125,98,77,16,222,216,240,211,79,125,111,87,186,73,63,253,204,107,102,177,63,174,197,224,212,231,172,149,246,33,68,223,67,102,93,64,152,152,5,216,102,247,134,36,197,150,236,57,77,56,138,95,71,204,31,23,149,241,213,78,172,165,249,100,187,12,45,19,57,67,120,54,63,15,239,41,217,127,61,254,60,201,104,68,3,135,214,206,93,253,255,192,94,56,107,68,210,57,61,41,249,47,156,130,244,52,12,163,216,236,69,0,0,0,0,0,0>>.
diff --git a/lib/asn1/test/testSelectionTypes.erl b/lib/asn1/test/testSelectionTypes.erl
index 6d060321da..7d273fe656 100644
--- a/lib/asn1/test/testSelectionTypes.erl
+++ b/lib/asn1/test/testSelectionTypes.erl
@@ -23,10 +23,34 @@
-include_lib("test_server/include/test_server.hrl").
test() ->
- Val = ["PrintableString","PrintableString","PrintableString"],
["Es"] = Val2 = ['SelectionType':einsteinium()],
- roundtrip('MendeleyevTable', Val),
+ roundtrip('MendeleyevTable', ["fox","tree","cat","stone"]),
roundtrip('MendeleyevTable', Val2),
+ roundtrip('MendeleyevSet', [42,57,93,101]),
+
+ M = 'SelectionType',
+ true = M:boolv(),
+ 4 = M:intv(),
+ <<2#1001:4>> = M:bsv(),
+ <<16#3130:16>> = M:osv(),
+ 'NULL' = M:nullv(),
+ {2,1,1} = M:oiv(),
+ "ObjectDesc" = M:odv(),
+ "utf8" = M:utfv(),
+ {5,32767,256} = M:rov(),
+ "089" = M:numsv(),
+ "telet" = M:teletv(),
+ "t61" = M:t61v(),
+ "video" = M:videov(),
+ "ia5" = M:ia5v(),
+ "9805281429Z" = M:utctimev(),
+ "19980528142905.1" = M:gTime(),
+ "graphic" = M:gsv(),
+ "visible" = M:vsv(),
+ "general" = M:gStringv(),
+ "Universal" = M:univv(),
+ "bmp" = M:bmov(),
+
ok.
roundtrip(T, V) ->
diff --git a/lib/asn1/test/testUniqueObjectSets.erl b/lib/asn1/test/testUniqueObjectSets.erl
new file mode 100644
index 0000000000..1ef61a885a
--- /dev/null
+++ b/lib/asn1/test/testUniqueObjectSets.erl
@@ -0,0 +1,175 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2014. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%%
+
+-module(testUniqueObjectSets).
+-export([main/3]).
+
+%% Run-time function called by the generated code.
+seq_roundtrip(I, D0) ->
+ M = 'UniqueObjectSets',
+ try
+ {ok,Enc} = M:encode('Seq', {'Seq',I,D0}),
+ {ok,{'Seq',I,D}} = M:decode('Seq', Enc),
+ D
+ catch C:E ->
+ Stk = erlang:get_stacktrace(),
+ io:format("FAILED: ~p ~p\n", [I,D0]),
+ erlang:raise(C, E, Stk)
+ end.
+
+types() ->
+ [{"CHOICE { a INTEGER, b BIT STRING }", {b,<<42:3>>}},
+ {"INTEGER",42},
+ {"SEQUENCE {a OCTET STRING}",{'_',<<"abc">>}},
+ {"SEQUENCE {b BOOLEAN, ...}",{'_',true}},
+ {"SEQUENCE {b BOOLEAN, ..., s IA5String, ..., e ENUMERATED { x, y, z}}",
+ {'_',false,"string",y}},
+ {"SET {a BIT STRING}",{'_',<<1:17>>}},
+ {"SEQUENCE OF INTEGER",[-19,0,555,777]},
+ {"SET OF BOOLEAN",[true,false,true]},
+ {"SEQUENCE OF SEQUENCE {x INTEGER (0..7)}",[{'_',7},{'_',0}]},
+ {"SET OF SEQUENCE {x INTEGER (0..7)}",[{'_',7},{'_',0}]}
+ ].
+
+main(CaseDir, Rule, Opts) ->
+ D0 = types(),
+ {D1,_} = lists:mapfoldl(fun({T,S}, I) ->
+ {{I,T,S},I+1}
+ end, 1, D0),
+ Types = [gen_types(I, Type) || {I,Type,_} <- D1],
+ Set = [gen_set_items(I, T) || {I,T,_} <- D1],
+ Objs = [gen_obj(I) || {I,_,_} <- D1],
+ DupObjs = [gen_dup_obj(I, T) || {I,T,_} <- D1],
+ DupObjRefs0 = [gen_dup_obj_refs(I) || {I,_,_} <- D1],
+ DupObjRefs = string:join(DupObjRefs0, " |\n"),
+ Asn1Spec = 'UniqueObjectSets',
+ A = ["UniqueObjectSets DEFINITIONS AUTOMATIC TAGS ::=\n",
+ "BEGIN\n\n",
+ "TEST-UNIQUE ::= CLASS {\n"
+ " &id INTEGER UNIQUE,\n"
+ " &Type OPTIONAL\n"
+ "}\n"
+ "WITH SYNTAX {IDENTIFIED BY &id [TYPE &Type]}\n",
+ $\n,
+ "DUP-CONTAINER ::= CLASS {\n"
+ " &id INTEGER UNIQUE,\n"
+ " &data TEST-UNIQUE\n"
+ "} WITH SYNTAX {\n"
+ " ID &id, &data\n"
+ "}\n",
+ $\n,
+ Types,$\n,
+ "UniqSet TEST-UNIQUE ::= {\n",
+ Set,
+ " DupSet-1 |\n",
+ " DupSet-2, ...\n",
+ "}\n\n",
+ Objs,$\n,
+ DupObjs,$\n,
+ "DupSet-1 TEST-UNIQUE ::= {\n",
+ DupObjRefs,$\n,
+ "}\n\n",
+ "DupSet-2 TEST-UNIQUE ::= {\n",
+ DupObjRefs,",...\n",
+ "}\n\n",
+ "Seq ::= SEQUENCE {\n"
+ " id TEST-UNIQUE.&id ({UniqSet}),\n"
+ " type TEST-UNIQUE.&Type ({UniqSet}{@id})\n"
+ "}\n"
+ "END\n"],
+ Asn1File = filename:join(CaseDir, atom_to_list(Asn1Spec)++".asn1"),
+ ok = file:write_file(Asn1File, A),
+
+ TestModule = 'unique_object_sets',
+ Test0 = [gen_test(I, Data) || {I,_,Data} <- D1],
+ Test = ["-module(",atom_to_list(TestModule),").\n"
+ "-export([main/1]).\n"
+ "\n"
+ "main(SeqRoundtrip) ->\n",
+ " ",atom_to_list(Rule)," = '",atom_to_list(Asn1Spec),
+ "':encoding_rule(),\n",
+ Test0,
+ " ok.\n"
+ ],
+ ErlFile = filename:join(CaseDir, atom_to_list(TestModule)++".erl"),
+ ok = file:write_file(ErlFile, Test),
+
+ io:format("~s\n~s\n", [Asn1File,ErlFile]),
+ case Rule of
+ per ->
+ io:put_chars([A,$\n,Test,$\n]);
+ _ ->
+ ok
+ end,
+
+ ok = asn1ct:compile(Asn1File, [Rule,{outdir,CaseDir}|Opts]),
+ {ok,TestModule} = c:c(ErlFile, [{outdir,CaseDir}]),
+ TestModule:main(fun seq_roundtrip/2),
+ ok.
+
+gen_types(I, Type) ->
+ io_lib:format("AType~p ::= ~s\n", [I,Type]).
+
+gen_set_items(I, T) ->
+ io_lib:format(" {IDENTIFIED BY ~p TYPE AType~p} |\n"
+ " {IDENTIFIED BY ~p TYPE AType~p} |\n"
+ " {IDENTIFIED BY ~p TYPE ~s} |\n"
+ " obj-~p |\n\n",
+ [I,I,I,I,I,T,I]).
+
+gen_obj(I) ->
+ io_lib:format("obj-~p TEST-UNIQUE ::= {IDENTIFIED BY ~p TYPE AType~p}\n",
+ [I,I,I]).
+
+gen_dup_obj(I, T) ->
+ io_lib:format("dup-obj-~p DUP-CONTAINER ::= "
+ "{ID ~p, {IDENTIFIED BY ~p TYPE ~s}}\n",
+ [I,I,I+1000,T]).
+
+gen_dup_obj_refs(I) ->
+ io_lib:format("dup-obj-~p.&data", [I]).
+
+gen_test(I, Data) ->
+ io_lib:format(" ~s = SeqRoundtrip(~p, ~p),\n",
+ [match_term(Data),I,Data]).
+
+match_term('_') ->
+ "_";
+match_term([H|T]=L) ->
+ case is_intlist(L) of
+ true ->
+ io_lib:format("~p", [L]);
+ false ->
+ ["[",match_term(H),"|",match_term(T),"]"]
+ end;
+match_term(Tuple) when is_tuple(Tuple) ->
+ ["{",match_term_tuple(Tuple, 1),"}"];
+match_term(Other) ->
+ io_lib:format("~p", [Other]).
+
+match_term_tuple(T, I) when I =< tuple_size(T) ->
+ [match_term(element(I, T)),
+ if I < tuple_size(T) -> ",";
+ true -> "" end|match_term_tuple(T, I+1)];
+match_term_tuple(_, _) ->
+ [].
+
+is_intlist(L) ->
+ lists:all(fun is_integer/1, L).
diff --git a/lib/asn1/test/testValueTest.erl b/lib/asn1/test/testValueTest.erl
new file mode 100644
index 0000000000..8a8e973621
--- /dev/null
+++ b/lib/asn1/test/testValueTest.erl
@@ -0,0 +1,114 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2014. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%%
+-module(testValueTest).
+
+-export([main/0]).
+
+main() ->
+ M = 'ValueTest',
+
+ %% Basic types
+ 12 = M:'vANY'(),
+ true = M:'vBOOLEAN'(),
+ 12 = M:'vINTEGER'(),
+ 0 = M:'vINTEGERNNL'(),
+ button1 = M:'vENUMERATED'(),
+ [zero,two] = M:'vBS'(),
+ 'NULL' = M:'vNULL'(),
+ <<16#31,16#32,16#33>> = M:'vOS'(),
+
+ %% OID
+ {2,1,1} = M:'vOD'(),
+ {1,2} = M:'integer-first'(),
+ {2,4,5} = M:'rel-oid-1'(),
+ {0,2,4,5} = M:'include-roid'(),
+ {1,2,1} = M:'include-oid'(),
+ {1,2,1,2,4,5,42} = M:'include-all'(),
+
+ %% Character strings
+ "01234567" = M:'numericstring'(),
+ "PrintableString" = M:'printablestring'(),
+ "VisibleString" = M:'visiblestring'(),
+ [0,13] = M:'cr'(),
+ ["First line",[0,13],"Second line"] = M:'ia5string1'(),
+ [[5,5],[4,4],[6,6]] = M:'ia5string2'(),
+ "TeletexString" = M:'teletexstring'(),
+ "VideotexString" = M:'videotexstring'(),
+ "97100211-0500" = M:'utctime'(),
+ "19971002103130.5" = M:'generalizedtime'(),
+ "ObjectDescriptor" = M:'objectdescriptor'(),
+ "GraphicString" = M:'graphicstring'(),
+ "GeneralString" = M:'generalstring'(),
+ "BMPString" = M:'bmpstring1'(),
+ [0,0,0,65] = M:'latinCapitalLetterA'(),
+ [0,0,3,145] = M:'greekCapitalLetterSigma'(),
+ ["This is a capital A: ",
+ [0,0,0,65],
+ ", and a capital sigma: ",
+ [0,0,3,145],
+ "; try and spot the difference!"] = M:'my-universalstring'(),
+
+ %% Integers
+ 42 = M:someInteger(),
+ 42 = M:otherInteger(),
+ {'IntegerSeq',42} = M:integerSeq1(),
+
+ %% Value from object
+ 2 = M:'int-from-object-1'(),
+ 4 = M:'int-from-object-2'(),
+ roundtrip_error('II', 1),
+ roundtrip('II', 2),
+ roundtrip('II', 3),
+ roundtrip('II', 4),
+ roundtrip_error('II', 5),
+
+ %% Recursive value definitions.
+ {'OctetStringSeq',<<16#40,16#41,16#42>>} = M:octetStringSeq1(),
+ <<16#40,16#41,16#42>> = M:otherOctetString(),
+ <<16#40,16#41,16#42>> = M:someOctetString(),
+ {'OctetStringSeq',<<16#40,16#41,16#42>>} = M:octetStringSeq2(),
+ {'OctetStringSeq',<<16#40,16#41,16#FF>>} = M:octetStringSeq3(),
+ <<16#40,16#41,16#FF>> = M:'os-1'(),
+ <<16#40,16#41,16#FF>> = M:'os-2'(),
+
+ %% Recursive BIT STRING definitions.
+ {'BsSeq',<<2#101101:6>>,[c]} = M:bsSeq1(),
+ {'BsSeq',<<2#101101:6>>,[c]} = M:bsSeq2(),
+ {'BsSeq',<<2#101:3>>,[a,c]} = M:bsSeq3(),
+ <<2#101101:6>> = M:someBitString(),
+ <<2#101101:6>> = M:otherBitString(),
+ <<2#101:3>> = M:bsFromObject(),
+ <<2#101:3>> = M:bsFromObjectInd(),
+ [c] = M:someNamedBs(),
+ [c] = M:someOtherNamedBs(),
+
+ ok.
+
+
+roundtrip(T, V) ->
+ asn1_test_lib:roundtrip('ValueTest', T, V).
+
+roundtrip_error(T, V) ->
+ try asn1_test_lib:roundtrip('ValueTest', T, V) of
+ ok ->
+ test_server:fail()
+ catch _:_ ->
+ ok
+ end.
diff --git a/lib/asn1/test/testX420.erl b/lib/asn1/test/testX420.erl
deleted file mode 100644
index 4ddc55dc16..0000000000
--- a/lib/asn1/test/testX420.erl
+++ /dev/null
@@ -1,93 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2008-2013. All Rights Reserved.
-%%
-%% The contents of this file are subject to the Erlang Public License,
-%% Version 1.1, (the "License"); you may not use this file except in
-%% compliance with the License. You should have received a copy of the
-%% Erlang Public License along with this software. If not, it can be
-%% retrieved online at http://www.erlang.org/.
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and limitations
-%% under the License.
-%%
-%% %CopyrightEnd%
-%%
-%%
-%%-------------------------------------------------------------------
-
--module(testX420).
-
--export([compile/3, ticket7759/2]).
-
--include_lib("test_server/include/test_server.hrl").
-
-
-compile(Erule, Options, Config) ->
- Specs0 = specs(),
- 99 = length(Specs0),
- CaseDir = ?config(case_dir, Config),
- Specs = [filename:join(x420, Spec) || Spec <- Specs0],
- asn1_test_lib:compile_all(Specs, Config, [Erule,{i,CaseDir}|Options]).
-
-specs() ->
- ["ACSE-1", "AuthenticationFramework", "BasicAccessControl",
- "CertificateExtensions", "Character-Coding-Attributes",
- "Character-Presentation-Attributes", "Character-Profile-Attributes",
- "Colour-Attributes", "DOR-definition", "DSAOperationalAttributeTypes",
- "Default-Value-Lists", "DirectoryAbstractService",
- "DirectoryAccessProtocol", "DirectoryInformationShadowProtocol",
- "DirectoryOperationalBindingManagementProtocol",
- "DirectoryOperationalBindingTypes", "DirectoryProtectionMappings",
- "DirectoryShadowAbstractService", "DirectorySystemProtocol",
- "DistributedOperations", "Document-Profile-Descriptor",
- "EnhancedSecurity", "External-References", "GULSProtectionMappings",
- "GenericProtectingTransferSyntax", "Geo-Gr-Coding-Attributes",
- "Geo-Gr-Presentation-Attributes", "Geo-Gr-Profile-Attributes",
- "GulsSecurityExchanges", "GulsSecurityTransformations",
- "HierarchicalOperationalBindings", "IPMSAbstractService",
- "IPMSAutoActionTypes", "IPMSExtendedBodyPartTypes",
- "IPMSExtendedBodyPartTypes2", "IPMSExtendedVoiceBodyPartType",
- "IPMSFileTransferBodyPartType", "IPMSForwardedContentBodyPartType",
- "IPMSForwardedReportBodyPartType", "IPMSFunctionalObjects",
- "IPMSHeadingExtensions", "IPMSInformationObjects",
- "IPMSMessageStoreAttributes", "IPMSObjectIdentifiers",
- "IPMSObjectIdentifiers2", "IPMSSecurityExtensions", "IPMSUpperBounds",
- "ISO-STANDARD-9541-FONT-ATTRIBUTE-SET", "ISO8571-FTAM", "ISO9541-SN",
- "Identifiers-and-Expressions", "InformationFramework",
- "Interchange-Data-Elements", "Layout-Descriptors", "Link-Descriptors",
- "Location-Expressions", "Logical-Descriptors", "MHSObjectIdentifiers",
- "MHSProtocolObjectIdentifiers", "MSAbstractService",
- "MSAccessProtocol", "MSGeneralAttributeTypes",
- "MSGeneralAutoActionTypes", "MSMatchingRules", "MSObjectIdentifiers",
- "MSUpperBounds", "MTAAbstractService", "MTSAbstractService",
- "MTSAbstractService88", "MTSAccessProtocol", "MTSObjectIdentifiers",
- "MTSUpperBounds", "Notation", "ObjectIdentifiers",
- "OperationalBindingManagement", "PKCS7", "PKCS7BodyPartType",
- "Protected-Part-Descriptors", "ProtocolObjectIdentifiers",
- "Raster-Gr-Coding-Attributes", "Raster-Gr-Presentation-Attributes",
- "Raster-Gr-Profile-Attributes", "Reliable-Transfer-APDU",
- "Remote-Operations-Abstract-Syntaxes",
- "Remote-Operations-Generic-ROS-PDUs",
- "Remote-Operations-Information-Objects-extensions",
- "Remote-Operations-Information-Objects",
- "Remote-Operations-Realizations",
- "Remote-Operations-Useful-Definitions", "SelectedAttributeTypes",
- "SeseAPDUs", "SpkmGssTokens", "Style-Descriptors", "Subprofiles",
- "Temporal-Relationships", "Text-Units", "UpperBounds",
- "UsefulDefinitions", "Videotex-Coding-Attributes"].
-
-ticket7759(_Erule,_Config) ->
- Encoded = encoded_msg(),
- io:format("Testing ticket7759 ...~n",[]),
- {ok, ContentInfo} = 'PKCS7':decode('ContentInfo',Encoded),
- {'ContentInfo',_Id,PKCS7_content} = ContentInfo,
- {ok,_} = 'PKCS7':decode('SignedData',PKCS7_content),
- ok.
-
-
-encoded_msg() ->
- <<48,128,6,9,42,134,72,134,247,13,1,7,2,160,128,48,128,2,1,1,49,11,48,9,6,5,43,14,3,2,26,5,0,48,128,6,9,42,134,72,134,247,13,1,7,1,160,128,36,128,0,0,0,0,0,0, 49,130,1,192,48,130,1,188,2,1,1,48,50,48,38,49,17,48,15,6,3,85,4,3,12,8,65,100,109,105,110,67,65,49,49,17,48,15,6,3,85,4,10,12,8,69,82,73,67,83,83,79,78,2,8,15,151,245,186,21,23,240,96,48,9,6,5,43,14,3,2,26,5,0,160,129,229,48,17,6,10,96,134,72,1,134,248,69,1,9,2,49,3,19,1,51,48,17,6,10,96,134,72,1,134,248,69,1,9,3,49,3,19,1,51,48,24,6,9,42,134,72,134,247,13,1,9,3,49,11,6,9,42,134,72,134,247,13,1,7,1,48,28,6,9,42,134,72,134,247,13,1,9,5,49,15,23,13,48,56,49,50,49,48,48,57,53,52,50,51,90,48,28,6,10,96,134,72,1,134,248,69,1,9,7,49,14,19,12,49,53,50,56,49,52,50,52,48,57,53,53,48,32,6,10,96,134,72,1,134,248,69,1,9,5,49,18,4,16,165,115,177,71,78,88,239,113,78,56,98,98,18,202,217,235,48,32,6,10,96,134,72,1,134,248,69,1,9,6,49,18,4,16,227,174,230,251,43,153,252,65,11,93,231,83,34,18,55,46,48,35,6,9,42,134,72,134,247,13,1,9,4,49,22,4,20,218,57,163,238,94,107,75,13,50,85,191,239,149,96,24,144,175,216,7,9,48,13,6,9,42,134,72,134,247,13,1,1,1,5,0,4,129,128,106,233,116,125,140,51,133,173,63,41,54,138,214,211,89,215,169,125,98,77,16,222,216,240,211,79,125,111,87,186,73,63,253,204,107,102,177,63,174,197,224,212,231,172,149,246,33,68,223,67,102,93,64,152,152,5,216,102,247,134,36,197,150,236,57,77,56,138,95,71,204,31,23,149,241,213,78,172,165,249,100,187,12,45,19,57,67,120,54,63,15,239,41,217,127,61,254,60,201,104,68,3,135,214,206,93,253,255,192,94,56,107,68,210,57,61,41,249,47,156,130,244,52,12,163,216,236,69,0,0,0,0,0,0>>.
diff --git a/lib/asn1/test/test_compile_options.erl b/lib/asn1/test/test_compile_options.erl
index 7f358e863c..4b6357a395 100644
--- a/lib/asn1/test/test_compile_options.erl
+++ b/lib/asn1/test/test_compile_options.erl
@@ -24,7 +24,7 @@
-export([wrong_path/1,comp/2,path/1,ticket_6143/1,noobj/1,
- record_name_prefix/1,verbose/1,warnings_as_errors/1]).
+ record_name_prefix/1,verbose/1]).
%% OTP-5689
wrong_path(Config) ->
@@ -132,43 +132,6 @@ verbose(Config) when is_list(Config) ->
?line [] = test_server:capture_get(),
ok.
-warnings_as_errors(Config) when is_list(Config) ->
- PrivDir = ?config(priv_dir,Config),
- Asn1File = filename:join([PrivDir,"WERROR.asn1"]),
- OutFile = filename:join([PrivDir,"WERROR.erl"]),
- Opts = [{outdir,PrivDir},noobj,verbose],
-
- %% Generate WERR.asn to emit warning
- %% Warning: Wrong format of type/value
- %% false/{'Externalvaluereference',_,'WERR',noInvokeId}
- Warn = <<"WERROR DEFINITIONS IMPLICIT TAGS ::=\n"
- "\n"
- "BEGIN\n"
- "\n"
- "InvokeId ::= CHOICE\n"
- "{\n"
- " present INTEGER,\n"
- " absent NULL\n"
- "}\n"
- "\n"
- "noInvokeId InvokeId ::= absent:NULL\n"
- "\n"
- "NoInvokeId InvokeId ::= {noInvokeId}\n"
- "\n"
- "END -- end of useful definitions.\n">>,
- ?line ok = file:write_file(Asn1File, Warn),
-
- %% Test warnings_as_errors compile
- ?line false = filelib:is_regular(OutFile),
- ?line {error, _} = asn1ct:compile(Asn1File, [warnings_as_errors|Opts]),
- ?line false = filelib:is_regular(OutFile),
-
- %% Test normal compile
- ?line ok = asn1ct:compile(Asn1File, Opts),
- ?line true = filelib:is_regular(OutFile),
- ?line ok = file:delete(OutFile),
- ok.
-
outfiles_check(OutDir) ->
outfiles_check(OutDir,outfiles1()).
diff --git a/lib/common_test/src/ct_cover.erl b/lib/common_test/src/ct_cover.erl
index c7f446dee9..b630a51835 100644
--- a/lib/common_test/src/ct_cover.erl
+++ b/lib/common_test/src/ct_cover.erl
@@ -174,7 +174,7 @@ get_spec_test(File) ->
[] -> [#cover{app=none, level=details}];
_ -> Res
end,
- case get_cover_opts(Apps, Terms, []) of
+ case get_cover_opts(Apps, Terms, Dir, []) of
E = {error,_} ->
E;
[CoverSpec] ->
@@ -205,124 +205,125 @@ collect_apps([], Apps) ->
%% get_cover_opts(Terms) -> AppCoverInfo
%% AppCoverInfo: [#cover{app=App,...}]
-get_cover_opts([App | Apps], Terms, CoverInfo) ->
- case get_app_info(App, Terms) of
+get_cover_opts([App | Apps], Terms, Dir, CoverInfo) ->
+ case get_app_info(App, Terms, Dir) of
E = {error,_} -> E;
AppInfo ->
AppInfo1 = files2mods(AppInfo),
- get_cover_opts(Apps, Terms, [AppInfo1|CoverInfo])
+ get_cover_opts(Apps, Terms, Dir, [AppInfo1|CoverInfo])
end;
-get_cover_opts([], _, CoverInfo) ->
+get_cover_opts([], _, _, CoverInfo) ->
lists:reverse(CoverInfo).
-%% get_app_info(App, Terms) -> App1
+%% get_app_info(App, Terms, Dir) -> App1
-get_app_info(App=#cover{app=none}, [{incl_dirs,Dirs}|Terms]) ->
- get_app_info(App, [{incl_dirs,none,Dirs}|Terms]);
-get_app_info(App=#cover{app=Name}, [{incl_dirs,Name,Dirs}|Terms]) ->
- case get_files(Dirs, ".beam", false, []) of
+get_app_info(App=#cover{app=none}, [{incl_dirs,Dirs}|Terms], Dir) ->
+ get_app_info(App, [{incl_dirs,none,Dirs}|Terms], Dir);
+get_app_info(App=#cover{app=Name}, [{incl_dirs,Name,Dirs}|Terms], Dir) ->
+ case get_files(Dirs, Dir, ".beam", false, []) of
E = {error,_} -> E;
Mods1 ->
Mods = App#cover.incl_mods,
- get_app_info(App#cover{incl_mods=Mods++Mods1},Terms)
+ get_app_info(App#cover{incl_mods=Mods++Mods1},Terms,Dir)
end;
-get_app_info(App=#cover{app=none}, [{incl_dirs_r,Dirs}|Terms]) ->
- get_app_info(App, [{incl_dirs_r,none,Dirs}|Terms]);
-get_app_info(App=#cover{app=Name}, [{incl_dirs_r,Name,Dirs}|Terms]) ->
- case get_files(Dirs, ".beam", true, []) of
+get_app_info(App=#cover{app=none}, [{incl_dirs_r,Dirs}|Terms], Dir) ->
+ get_app_info(App, [{incl_dirs_r,none,Dirs}|Terms], Dir);
+get_app_info(App=#cover{app=Name}, [{incl_dirs_r,Name,Dirs}|Terms], Dir) ->
+ case get_files(Dirs, Dir, ".beam", true, []) of
E = {error,_} -> E;
Mods1 ->
Mods = App#cover.incl_mods,
- get_app_info(App#cover{incl_mods=Mods++Mods1},Terms)
+ get_app_info(App#cover{incl_mods=Mods++Mods1},Terms,Dir)
end;
-get_app_info(App=#cover{app=none}, [{incl_mods,Mods1}|Terms]) ->
- get_app_info(App, [{incl_mods,none,Mods1}|Terms]);
-get_app_info(App=#cover{app=Name}, [{incl_mods,Name,Mods1}|Terms]) ->
+get_app_info(App=#cover{app=none}, [{incl_mods,Mods1}|Terms], Dir) ->
+ get_app_info(App, [{incl_mods,none,Mods1}|Terms], Dir);
+get_app_info(App=#cover{app=Name}, [{incl_mods,Name,Mods1}|Terms], Dir) ->
Mods = App#cover.incl_mods,
- get_app_info(App#cover{incl_mods=Mods++Mods1},Terms);
+ get_app_info(App#cover{incl_mods=Mods++Mods1},Terms,Dir);
-get_app_info(App=#cover{app=none}, [{excl_dirs,Dirs}|Terms]) ->
- get_app_info(App, [{excl_dirs,none,Dirs}|Terms]);
-get_app_info(App=#cover{app=Name}, [{excl_dirs,Name,Dirs}|Terms]) ->
- case get_files(Dirs, ".beam", false, []) of
+get_app_info(App=#cover{app=none}, [{excl_dirs,Dirs}|Terms], Dir) ->
+ get_app_info(App, [{excl_dirs,none,Dirs}|Terms], Dir);
+get_app_info(App=#cover{app=Name}, [{excl_dirs,Name,Dirs}|Terms], Dir) ->
+ case get_files(Dirs, Dir, ".beam", false, []) of
E = {error,_} -> E;
Mods1 ->
Mods = App#cover.excl_mods,
- get_app_info(App#cover{excl_mods=Mods++Mods1},Terms)
+ get_app_info(App#cover{excl_mods=Mods++Mods1},Terms,Dir)
end;
-get_app_info(App=#cover{app=none}, [{excl_dirs_r,Dirs}|Terms]) ->
- get_app_info(App, [{excl_dirs_r,none,Dirs}|Terms]);
-get_app_info(App=#cover{app=Name}, [{excl_dirs_r,Name,Dirs}|Terms]) ->
- case get_files(Dirs, ".beam", true, []) of
+get_app_info(App=#cover{app=none}, [{excl_dirs_r,Dirs}|Terms],Dir) ->
+ get_app_info(App, [{excl_dirs_r,none,Dirs}|Terms],Dir);
+get_app_info(App=#cover{app=Name}, [{excl_dirs_r,Name,Dirs}|Terms],Dir) ->
+ case get_files(Dirs, Dir, ".beam", true, []) of
E = {error,_} -> E;
Mods1 ->
Mods = App#cover.excl_mods,
- get_app_info(App#cover{excl_mods=Mods++Mods1},Terms)
+ get_app_info(App#cover{excl_mods=Mods++Mods1},Terms,Dir)
end;
-get_app_info(App=#cover{app=none}, [{excl_mods,Mods1}|Terms]) ->
- get_app_info(App, [{excl_mods,none,Mods1}|Terms]);
-get_app_info(App=#cover{app=Name}, [{excl_mods,Name,Mods1}|Terms]) ->
+get_app_info(App=#cover{app=none}, [{excl_mods,Mods1}|Terms], Dir) ->
+ get_app_info(App, [{excl_mods,none,Mods1}|Terms], Dir);
+get_app_info(App=#cover{app=Name}, [{excl_mods,Name,Mods1}|Terms], Dir) ->
Mods = App#cover.excl_mods,
- get_app_info(App#cover{excl_mods=Mods++Mods1},Terms);
+ get_app_info(App#cover{excl_mods=Mods++Mods1},Terms,Dir);
-get_app_info(App=#cover{app=none}, [{cross,Cross}|Terms]) ->
- get_app_info(App, [{cross,none,Cross}|Terms]);
-get_app_info(App=#cover{app=Name}, [{cross,Name,Cross1}|Terms]) ->
+get_app_info(App=#cover{app=none}, [{cross,Cross}|Terms], Dir) ->
+ get_app_info(App, [{cross,none,Cross}|Terms], Dir);
+get_app_info(App=#cover{app=Name}, [{cross,Name,Cross1}|Terms], Dir) ->
Cross = App#cover.cross,
- get_app_info(App#cover{cross=Cross++Cross1},Terms);
+ get_app_info(App#cover{cross=Cross++Cross1},Terms,Dir);
-get_app_info(App=#cover{app=none}, [{src_dirs,Dirs}|Terms]) ->
- get_app_info(App, [{src_dirs,none,Dirs}|Terms]);
-get_app_info(App=#cover{app=Name}, [{src_dirs,Name,Dirs}|Terms]) ->
- case get_files(Dirs, ".erl", false, []) of
+get_app_info(App=#cover{app=none}, [{src_dirs,Dirs}|Terms], Dir) ->
+ get_app_info(App, [{src_dirs,none,Dirs}|Terms], Dir);
+get_app_info(App=#cover{app=Name}, [{src_dirs,Name,Dirs}|Terms], Dir) ->
+ case get_files(Dirs, Dir, ".erl", false, []) of
E = {error,_} -> E;
Src1 ->
Src = App#cover.src,
- get_app_info(App#cover{src=Src++Src1},Terms)
+ get_app_info(App#cover{src=Src++Src1},Terms,Dir)
end;
-get_app_info(App=#cover{app=none}, [{src_dirs_r,Dirs}|Terms]) ->
- get_app_info(App, [{src_dirs_r,none,Dirs}|Terms]);
-get_app_info(App=#cover{app=Name}, [{src_dirs_r,Name,Dirs}|Terms]) ->
- case get_files(Dirs, ".erl", true, []) of
+get_app_info(App=#cover{app=none}, [{src_dirs_r,Dirs}|Terms], Dir) ->
+ get_app_info(App, [{src_dirs_r,none,Dirs}|Terms], Dir);
+get_app_info(App=#cover{app=Name}, [{src_dirs_r,Name,Dirs}|Terms], Dir) ->
+ case get_files(Dirs, Dir, ".erl", true, []) of
E = {error,_} -> E;
Src1 ->
Src = App#cover.src,
- get_app_info(App#cover{src=Src++Src1},Terms)
+ get_app_info(App#cover{src=Src++Src1},Terms,Dir)
end;
-get_app_info(App=#cover{app=none}, [{src_files,Src1}|Terms]) ->
- get_app_info(App, [{src_files,none,Src1}|Terms]);
-get_app_info(App=#cover{app=Name}, [{src_files,Name,Src1}|Terms]) ->
+get_app_info(App=#cover{app=none}, [{src_files,Src1}|Terms], Dir) ->
+ get_app_info(App, [{src_files,none,Src1}|Terms], Dir);
+get_app_info(App=#cover{app=Name}, [{src_files,Name,Src1}|Terms], Dir) ->
Src = App#cover.src,
- get_app_info(App#cover{src=Src++Src1},Terms);
+ get_app_info(App#cover{src=Src++Src1},Terms,Dir);
-get_app_info(App, [_|Terms]) ->
- get_app_info(App, Terms);
+get_app_info(App, [_|Terms], Dir) ->
+ get_app_info(App, Terms, Dir);
-get_app_info(App, []) ->
+get_app_info(App, [], _) ->
App.
%% get_files(...)
-get_files([Dir|Dirs], Ext, Recurse, Files) ->
- case file:list_dir(Dir) of
+get_files([Dir|Dirs], RootDir, Ext, Recurse, Files) ->
+ DirAbs = filename:absname(Dir, RootDir),
+ case file:list_dir(DirAbs) of
{ok,Entries} ->
- {SubDirs,Matches} = analyse_files(Entries, Dir, Ext, [], []),
+ {SubDirs,Matches} = analyse_files(Entries, DirAbs, Ext, [], []),
if Recurse == false ->
- get_files(Dirs, Ext, Recurse, Files++Matches);
+ get_files(Dirs, RootDir, Ext, Recurse, Files++Matches);
true ->
- Files1 = get_files(SubDirs, Ext, Recurse, Files++Matches),
- get_files(Dirs, Ext, Recurse, Files1)
+ Files1 = get_files(SubDirs, RootDir, Ext, Recurse, Files++Matches),
+ get_files(Dirs, RootDir, Ext, Recurse, Files1)
end;
{error,Reason} ->
- {error,{Reason,Dir}}
+ {error,{Reason,DirAbs}}
end;
-get_files([], _Ext, _R, Files) ->
+get_files([], _RootDir, _Ext, _R, Files) ->
Files.
%% analyse_files(...)
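
To illustrate the threading of Dir through get_cover_opts/get_app_info/get_files above: relative directories listed in a cover specification are now resolved against the directory containing the spec file via filename:absname/2, rather than against the current working directory. A hypothetical spec file (file name, paths and module names invented) could therefore use relative entries like this:

%% my_cover.spec -- hypothetical cover specification file.
%% "../my_app/ebin" is resolved as
%% filename:absname("../my_app/ebin", DirContainingThisSpecFile).
{incl_dirs, ["../my_app/ebin"]}.
{excl_mods, [my_app_tests]}.
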
diff --git a/lib/common_test/src/ct_release_test.erl b/lib/common_test/src/ct_release_test.erl
index eb9e9c832f..3f0b5bda67 100644
--- a/lib/common_test/src/ct_release_test.erl
+++ b/lib/common_test/src/ct_release_test.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2014. All Rights Reserved.
+%% Copyright Ericsson AB 2014-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -51,10 +51,11 @@
%% executed.
%%
%% <dl>
-%% <dt>Module:upgrade_init(State) -> NewState</dt>
+%% <dt>Module:upgrade_init(CtData,State) -> NewState</dt>
%% <dd>Types:
%%
-%% <b><c>State = NewState = cb_state()</c></b>
+%% <b><code>CtData = {@link ct_data()}</code></b><br/>
+%% <b><code>State = NewState = cb_state()</code></b>
%%
%% Initialyze system before upgrade test starts.
%%
@@ -63,17 +64,22 @@
%% the boot script, so this callback is intended for additional
%% initialization, if necessary.
%%
+%% <code>CtData</code> is an opaque data structure which shall be used
+%% in any call to <code>ct_release_test</code> inside the callback.
+%%
%% Example:
%%
%% ```
-%% upgrade_init(State) ->
+%% upgrade_init(CtData,State) ->
+%% {ok,{FromVsn,ToVsn}} = ct_release_test:get_app_vsns(CtData,myapp),
%% open_connection(State).'''
%% </dd>
%%
-%% <dt>Module:upgrade_upgraded(State) -> NewState</dt>
+%% <dt>Module:upgrade_upgraded(CtData,State) -> NewState</dt>
%% <dd>Types:
%%
-%% <b><c>State = NewState = cb_state()</c></b>
+%% <b><code>CtData = {@link ct_data()}</code></b><br/>
+%% <b><code>State = NewState = cb_state()</code></b>
%%
%% Check that upgrade was successful.
%%
@@ -82,17 +88,21 @@
%% been made permanent. It allows application specific checks to
%% ensure that the upgrade was successful.
%%
+%% <code>CtData</code> is an opaque data structure which shall be used
+%% in any call to <code>ct_release_test</code> inside the callback.
+%%
%% Example:
%%
%% ```
-%% upgrade_upgraded(State) ->
+%% upgrade_upgraded(CtData,State) ->
%% check_connection_still_open(State).'''
%% </dd>
%%
-%% <dt>Module:upgrade_downgraded(State) -> NewState</dt>
+%% <dt>Module:upgrade_downgraded(CtData,State) -> NewState</dt>
%% <dd>Types:
%%
-%% <b><c>State = NewState = cb_state()</c></b>
+%% <b><code>CtData = {@link ct_data()}</code></b><br/>
+%% <b><code>State = NewState = cb_state()</code></b>
%%
%% Check that downgrade was successful.
%%
@@ -101,10 +111,13 @@
%% made permanent. It allows application specific checks to ensure
%% that the downgrade was successful.
%%
+%% <code>CtData</code> is an opaque data structure which shall be used
+%% in any call to <code>ct_release_test</code> inside the callback.
+%%
%% Example:
%%
%% ```
-%% upgrade_init(State) ->
+%% upgrade_downgraded(CtData,State) ->
%% check_connection_closed(State).'''
%% </dd>
%% </dl>
@@ -112,7 +125,7 @@
%%-----------------------------------------------------------------
-module(ct_release_test).
--export([init/1, upgrade/4, cleanup/1]).
+-export([init/1, upgrade/4, cleanup/1, get_app_vsns/2, get_appup/2]).
-include_lib("kernel/include/file.hrl").
@@ -121,12 +134,17 @@
-define(exclude_apps, [hipe, typer, dialyzer]). % never include these apps
%%-----------------------------------------------------------------
+-record(ct_data, {from,to}).
+
+%%-----------------------------------------------------------------
-type config() :: [{atom(),term()}].
-type cb_state() :: term().
+-opaque ct_data() :: #ct_data{}.
+-export_type([ct_data/0]).
--callback upgrade_init(cb_state()) -> cb_state().
--callback upgrade_upgraded(cb_state()) -> cb_state().
--callback upgrade_downgraded(cb_state()) -> cb_state().
+-callback upgrade_init(ct_data(),cb_state()) -> cb_state().
+-callback upgrade_upgraded(ct_data(),cb_state()) -> cb_state().
+-callback upgrade_downgraded(ct_data(),cb_state()) -> cb_state().
%%-----------------------------------------------------------------
-spec init(Config) -> Result when
@@ -207,12 +225,12 @@ init(Config) ->
%% <li>Perform the upgrade test and allow customized
%% control by using callbacks:
%% <ol>
-%% <li>Callback: `upgrade_init/1'</li>
+%% <li>Callback: `upgrade_init/2'</li>
%% <li>Unpack the new release</li>
%% <li>Install the new release</li>
-%% <li>Callback: `upgrade_upgraded/1'</li>
+%% <li>Callback: `upgrade_upgraded/2'</li>
%% <li>Install the original release</li>
-%% <li>Callback: `upgrade_downgraded/1'</li>
+%% <li>Callback: `upgrade_downgraded/2'</li>
%% </ol>
%% </li>
%% </ol>
@@ -314,6 +332,71 @@ cleanup(Config) ->
Config.
%%-----------------------------------------------------------------
+-spec get_app_vsns(CtData,App) -> {ok,{From,To}} | {error,Reason} when
+ CtData :: ct_data(),
+ App :: atom(),
+ From :: string(),
+ To :: string(),
+ Reason :: {app_not_found,App}.
+%% @doc Get versions involved in this upgrade for the given application.
+%%
+%% This function can be called from inside any of the callback
+%% functions. It returns the old (From) and new (To) versions involved
+%% in the upgrade/downgrade test for the given application.
+%%
+%% <code>CtData</code> must be the first argument received in the
+%% calling callback function - an opaque data structure set by
+%% <code>ct_release_tests</code>.
+get_app_vsns(#ct_data{from=FromApps,to=ToApps},App) ->
+ case {lists:keyfind(App,1,FromApps),lists:keyfind(App,1,ToApps)} of
+ {{App,FromVsn,_},{App,ToVsn,_}} ->
+ {ok,{FromVsn,ToVsn}};
+ _ ->
+ {error,{app_not_found,App}}
+ end.
+
+%%-----------------------------------------------------------------
+-spec get_appup(CtData,App) -> {ok,Appup} | {error,Reason} when
+ CtData :: ct_data(),
+ App :: atom(),
+ Appup :: {From,To,Up,Down},
+ From :: string(),
+ To :: string(),
+ Up :: [Instr],
+ Down :: [Instr],
+ Instr :: term(),
+ Reason :: {app_not_found,App} | {vsn_not_found,{App,From}}.
+%% @doc Get appup instructions for the given application.
+%%
+%% This function can be called from inside any of the callback
+%% functions. It reads the appup file for the given application and
+%% returns the instructions for upgrade and downgrade for the versions
+%% in the test.
+%%
+%% <code>CtData</code> must be the first argument received in the
+%% calling callback function - an opaque data structure set by
+%% <code>ct_release_tests</code>.
+%%
+%% See the reference manual for appup files for the type definitions of
+%% the instructions.
+get_appup(#ct_data{from=FromApps,to=ToApps},App) ->
+ case lists:keyfind(App,1,ToApps) of
+ {App,ToVsn,ToDir} ->
+ Appup = filename:join([ToDir, "ebin", atom_to_list(App)++".appup"]),
+ {ok, [{ToVsn, Ups, Downs}]} = file:consult(Appup),
+ {App,FromVsn,_} = lists:keyfind(App,1,FromApps),
+ case {systools_relup:appup_search_for_version(FromVsn,Ups),
+ systools_relup:appup_search_for_version(FromVsn,Downs)} of
+ {{ok,Up},{ok,Down}} ->
+ {ok,{FromVsn,ToVsn,Up,Down}};
+ _ ->
+ {error,{vsn_not_found,{App,FromVsn}}}
+ end;
+ false ->
+ {error,{app_not_found,App}}
+ end.
+
+%%-----------------------------------------------------------------
init_upgrade_test() ->
%% Check that a real release is running, not e.g. cerl
ok = application:ensure_started(sasl),
@@ -558,8 +641,14 @@ do_upgrade({Cb,InitState},FromVsn,FromAppsVsns,ToRel,ToAppsVsns,InstallDir) ->
Start = filename:join([InstallDir,bin,start]),
{ok,Node} = start_node(Start,FromVsn,FromAppsVsns),
+ %% Add path to this module, to allow calls to get_appup/2
+ Dir = filename:dirname(code:which(?MODULE)),
+ _ = rpc:call(Node,code,add_pathz,[Dir]),
+
ct:log("Node started: ~p",[Node]),
- State1 = do_callback(Node,Cb,upgrade_init,InitState),
+ CtData = #ct_data{from = [{A,V,code:lib_dir(A)} || {A,V} <- FromAppsVsns],
+ to=[{A,V,code:lib_dir(A)} || {A,V} <- ToAppsVsns]},
+ State1 = do_callback(Node,Cb,upgrade_init,[CtData,InitState]),
[{"OTP upgrade test",FromVsn,_,permanent}] =
rpc:call(Node,release_handler,which_releases,[]),
@@ -592,7 +681,7 @@ do_upgrade({Cb,InitState},FromVsn,FromAppsVsns,ToRel,ToAppsVsns,InstallDir) ->
{"OTP upgrade test",FromVsn,_,old}] =
rpc:call(Node,release_handler,which_releases,[]),
- State2 = do_callback(Node,Cb,upgrade_upgraded,State1),
+ State2 = do_callback(Node,Cb,upgrade_upgraded,[CtData,State1]),
ct:log("Re-installing old release"),
case rpc:call(Node,release_handler,install_release,[FromVsn]) of
@@ -615,7 +704,7 @@ do_upgrade({Cb,InitState},FromVsn,FromAppsVsns,ToRel,ToAppsVsns,InstallDir) ->
{"OTP upgrade test",FromVsn,_,permanent}] =
rpc:call(Node,release_handler,which_releases,[]),
- _State3 = do_callback(Node,Cb,upgrade_downgraded,State2),
+ _State3 = do_callback(Node,Cb,upgrade_downgraded,[CtData,State2]),
ct:log("Terminating node ~p",[Node]),
erlang:monitor_node(Node,true),
@@ -625,15 +714,15 @@ do_upgrade({Cb,InitState},FromVsn,FromAppsVsns,ToRel,ToAppsVsns,InstallDir) ->
ok.
-do_callback(Node,Mod,Func,State) ->
+do_callback(Node,Mod,Func,Args) ->
Dir = filename:dirname(code:which(Mod)),
_ = rpc:call(Node,code,add_path,[Dir]),
ct:log("Calling ~p:~p/1",[Mod,Func]),
- R = rpc:call(Node,Mod,Func,[State]),
- ct:log("~p:~p/1 returned: ~p",[Mod,Func,R]),
+ R = rpc:call(Node,Mod,Func,Args),
+ ct:log("~p:~p/~w returned: ~p",[Mod,Func,length(Args),R]),
case R of
{badrpc,Error} ->
- test_server:fail({test_upgrade_callback,Mod,Func,State,Error});
+ test_server:fail({test_upgrade_callback,Mod,Func,Args,Error});
NewState ->
NewState
end.
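
Taken together with the new two-argument callbacks, the additions above allow a callback module shaped roughly like the sketch below. The module name, the use of myapp and the trivial state handling are illustrative only; the ct_release_test calls and their return shapes follow the specs documented above.

-module(my_upgrade_cb).
-export([upgrade_init/2, upgrade_upgraded/2, upgrade_downgraded/2]).

%% Hypothetical callback module; 'myapp' is a placeholder application.
upgrade_init(CtData, State) ->
    %% Versions involved in this particular upgrade/downgrade test.
    {ok,{FromVsn,ToVsn}} = ct_release_test:get_app_vsns(CtData, myapp),
    %% Appup instructions selected for exactly these versions.
    {ok,{FromVsn,ToVsn,_UpInstrs,_DownInstrs}} =
        ct_release_test:get_appup(CtData, myapp),
    State.

upgrade_upgraded(_CtData, State) ->
    %% Application-specific checks after the new release is made permanent.
    State.

upgrade_downgraded(_CtData, State) ->
    %% Application-specific checks after the old release is re-installed.
    State.
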
diff --git a/lib/common_test/src/ct_run.erl b/lib/common_test/src/ct_run.erl
index 00d0aab507..3605385689 100644
--- a/lib/common_test/src/ct_run.erl
+++ b/lib/common_test/src/ct_run.erl
@@ -293,10 +293,10 @@ script_start1(Parent, Args) ->
application:set_env(common_test, auto_compile, true),
InclDirs =
case proplists:get_value(include, Args) of
- Incl when is_list(hd(Incl)) ->
- Incl;
+ Incls when is_list(hd(Incls)) ->
+ [filename:absname(IDir) || IDir <- Incls];
Incl when is_list(Incl) ->
- [Incl];
+ [filename:absname(Incl)];
undefined ->
[]
end,
@@ -1023,10 +1023,10 @@ run_test2(StartOpts) ->
case proplists:get_value(include, StartOpts) of
undefined ->
[];
- Incl when is_list(hd(Incl)) ->
- Incl;
+ Incls when is_list(hd(Incls)) ->
+ [filename:absname(IDir) || IDir <- Incls];
Incl when is_list(Incl) ->
- [Incl]
+ [filename:absname(Incl)]
end,
case os:getenv("CT_INCLUDE_PATH") of
false ->
@@ -1393,6 +1393,7 @@ run_testspec2(TestSpec) ->
EnvInclude++Opts#opts.include
end,
application:set_env(common_test, include, AllInclude),
+
LogDir1 = which(logdir,Opts#opts.logdir),
case check_and_install_configfiles(
Opts#opts.config, LogDir1, Opts) of
@@ -2134,6 +2135,14 @@ do_run_test(Tests, Skip, Opts0) ->
case check_and_add(Tests, [], []) of
{ok,AddedToPath} ->
ct_util:set_testdata({stats,{0,0,{0,0}}}),
+
+ %% test_server needs to know the include path too
+ InclPath = case application:get_env(common_test, include) of
+ {ok,Incls} -> Incls;
+ _ -> []
+ end,
+ application:set_env(test_server, include, InclPath),
+
test_server_ctrl:start_link(local),
%% let test_server expand the test tuples and count no of cases
diff --git a/lib/common_test/test/ct_cover_SUITE.erl b/lib/common_test/test/ct_cover_SUITE.erl
index 87ba4ae1b9..1dab425509 100644
--- a/lib/common_test/test/ct_cover_SUITE.erl
+++ b/lib/common_test/test/ct_cover_SUITE.erl
@@ -77,7 +77,11 @@ all() ->
ct_cover_add_remove_nodes,
otp_9956,
cross,
- export_import
+ export_import,
+ relative_incl_dirs,
+ absolute_incl_dirs,
+ relative_excl_dirs,
+ absolute_excl_dirs
].
%%--------------------------------------------------------------------
@@ -215,6 +219,45 @@ export_import(Config) ->
check_calls(Events2,2),
ok.
+relative_incl_dirs(Config) ->
+ false = check_cover(Config),
+ RelDir = rel_path(?config(priv_dir, Config), ?config(data_dir, Config)),
+ CoverSpec = [{incl_dirs, [RelDir]}],
+ CoverFile = create_cover_file(rel_incl_dirs, CoverSpec, Config),
+ Opts = [{cover, CoverFile}],
+ {ok, Events} = run_test(rel_incl_dirs, default, Opts, Config),
+ check_calls(Events, 1),
+ ok.
+
+absolute_incl_dirs(Config) ->
+ false = check_cover(Config),
+ CoverSpec = [{incl_dirs, [?config(data_dir, Config)]}],
+ CoverFile = create_cover_file(abs_incl_dirs, CoverSpec, Config),
+ Opts = [{cover, CoverFile}],
+ {ok, Events} = run_test(abs_incl_dirs, default, Opts, Config),
+ check_calls(Events, 1),
+ ok.
+
+relative_excl_dirs(Config) ->
+ false = check_cover(Config),
+ RelDir = rel_path(?config(priv_dir, Config), ?config(data_dir, Config)),
+ CoverSpec = default_cover_file_content() ++ [{excl_dirs, [RelDir]}],
+ CoverFile = create_cover_file(rel_excl_dirs, CoverSpec, Config),
+ Opts = [{cover, CoverFile}],
+ {ok, Events} = run_test(rel_excl_dirs, default_no_cover, Opts, Config),
+ check_no_cover_compiled(Events),
+ ok.
+
+absolute_excl_dirs(Config) ->
+ false = check_cover(Config),
+ AbsDir = ?config(data_dir, Config),
+ CoverSpec = default_cover_file_content() ++ [{excl_dirs, [AbsDir]}],
+ CoverFile = create_cover_file(abs_excl_dirs, CoverSpec, Config),
+ Opts = [{cover, CoverFile}],
+ {ok, Events} = run_test(abs_excl_dirs, default_no_cover, Opts, Config),
+ check_no_cover_compiled(Events),
+ ok.
+
%%%-----------------------------------------------------------------
%%% HELP FUNCTIONS
%%%-----------------------------------------------------------------
@@ -288,23 +331,36 @@ get_log_dirs(Events) ->
{ct_test_support_eh,
{event,start_logging,_Node,LogDir}} <- Events].
+%% Check if a module was compiled without cover
+check_no_cover_compiled(Events) ->
+ check_no_cover_compiled(Events, ?mod).
+check_no_cover_compiled(Events, Mod) ->
+ [ {error, {not_cover_compiled, Mod}} = analyse_log(CoverLog, Mod)
+ || CoverLog <- cover_logs(Events) ].
+
%% Check that each coverlog includes N calls to ?mod:foo/0
check_calls(Events,N) ->
check_calls(Events,{?mod,foo,0},N).
check_calls(Events,MFA,N) ->
- CoverLogs = [filename:join(D,"all.coverdata") || D <- get_log_dirs(Events)],
- do_check_logs(CoverLogs,MFA,N).
+ do_check_logs(cover_logs(Events),MFA,N).
do_check_logs([CoverLog|CoverLogs],{Mod,_,_} = MFA,N) ->
- {ok,_} = cover:start(),
- ok = cover:import(CoverLog),
- {ok,Calls} = cover:analyse(Mod,calls,function),
- ok = cover:stop(),
+ {ok, Calls} = analyse_log(CoverLog, Mod),
{MFA,N} = lists:keyfind(MFA,1,Calls),
do_check_logs(CoverLogs,MFA,N);
do_check_logs([],_,_) ->
ok.
+cover_logs(Events) ->
+ [filename:join(D,"all.coverdata") || D <- get_log_dirs(Events)].
+
+analyse_log(CoverLog, Mod) ->
+ {ok, _} = cover:start(),
+ ok = cover:import(CoverLog),
+ Result = cover:analyse(Mod, calls, function),
+ ok = cover:stop(),
+ Result.
+
fullname(Name) ->
{ok,Host} = inet:gethostname(),
list_to_atom(atom_to_list(Name) ++ "@" ++ Host).
@@ -333,3 +389,12 @@ start_slave(Name,Args) ->
{boot_timeout,10}, % extending some timers for slow test hosts
{init_timeout,10},
{startup_timeout,10}]).
+
+rel_path(From, To) ->
+ Segments = do_rel_path(filename:split(From), filename:split(To)),
+ filename:join(Segments).
+
+do_rel_path([Seg|RestA], [Seg|RestB]) ->
+ do_rel_path(RestA, RestB);
+do_rel_path(PathA, PathB) ->
+ lists:duplicate(length(PathA), "..") ++ PathB.
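For illustration, a minimal sketch of what the helper above computes, using hypothetical directories: rel_path/2 drops the common leading path segments and climbs out of what remains of From with ".." segments before descending into To, which is what the relative_incl_dirs and relative_excl_dirs cases rely on when building a cover spec relative to priv_dir:

    rel_path("/ldisk/test/priv_dir", "/ldisk/test/data_dir") -> "../data_dir"
    rel_path("/a/b/c", "/a/x/y")                             -> "../../x/y"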
diff --git a/lib/common_test/test/ct_cover_SUITE_data/cover_SUITE.erl b/lib/common_test/test/ct_cover_SUITE_data/cover_SUITE.erl
index 83d368c53d..789e48bd96 100644
--- a/lib/common_test/test/ct_cover_SUITE_data/cover_SUITE.erl
+++ b/lib/common_test/test/ct_cover_SUITE_data/cover_SUITE.erl
@@ -71,6 +71,10 @@ default(_Config) ->
cover_test_mod:foo(),
ok.
+default_no_cover(_Config) ->
+ cover_test_mod:foo(),
+ ok.
+
slave(_Config) ->
cover_compiled = code:which(cover_test_mod),
cover_test_mod:foo(),
diff --git a/lib/compiler/src/Makefile b/lib/compiler/src/Makefile
index c6d09d85eb..2032392821 100644
--- a/lib/compiler/src/Makefile
+++ b/lib/compiler/src/Makefile
@@ -81,6 +81,7 @@ MODULES = \
rec_env \
sys_core_dsetel \
sys_core_fold \
+ sys_core_fold_lists \
sys_core_inline \
sys_pre_attributes \
sys_pre_expand \
@@ -187,6 +188,7 @@ $(EBIN)/core_parse.beam: core_parse.hrl $(EGEN)/core_parse.erl
$(EBIN)/core_pp.beam: core_parse.hrl
$(EBIN)/sys_core_dsetel.beam: core_parse.hrl
$(EBIN)/sys_core_fold.beam: core_parse.hrl
+$(EBIN)/sys_core_fold_lists.beam: core_parse.hrl
$(EBIN)/sys_core_inline.beam: core_parse.hrl
$(EBIN)/sys_pre_expand.beam: ../../stdlib/include/erl_bits.hrl
$(EBIN)/v3_codegen.beam: v3_life.hrl
diff --git a/lib/compiler/src/beam_block.erl b/lib/compiler/src/beam_block.erl
index 5626aa34ab..7d65dc983a 100644
--- a/lib/compiler/src/beam_block.erl
+++ b/lib/compiler/src/beam_block.erl
@@ -155,7 +155,7 @@ collect(remove_message) -> {set,[],[],remove_message};
collect({put_map,F,Op,S,D,R,{list,Puts}}) ->
{set,[D],[S|Puts],{alloc,R,{put_map,Op,F}}};
collect({get_map_elements,F,S,{list,Gets}}) ->
- {Ss,Ds} = beam_utils:spliteven(Gets),
+ {Ss,Ds} = beam_utils:split_even(Gets),
{set,Ds,[S|Ss],{get_map_elements,F}};
collect({'catch',R,L}) -> {set,[R],[],{'catch',L}};
collect(fclearerror) -> {set,[],[],fclearerror};
diff --git a/lib/compiler/src/beam_bool.erl b/lib/compiler/src/beam_bool.erl
index 5a4621dc37..a452d30b61 100644
--- a/lib/compiler/src/beam_bool.erl
+++ b/lib/compiler/src/beam_bool.erl
@@ -126,44 +126,53 @@ bopt_block(Reg, Fail, OldIs, [{block,Bl0}|Acc0], St0) ->
%% There was a reference to a boolean expression
%% from inside a protected block (try/catch), to
%% a boolean expression outside.
- throw:protected_barrier ->
+ throw:protected_barrier ->
failed;
- %% The 'xor' operator was used. We currently don't
- %% find it worthwile to translate 'xor' operators
- %% (the code would be clumsy).
- throw:'xor' ->
+ %% The 'xor' operator was used. We currently don't
+ %% find it worthwhile to translate 'xor' operators
+ %% (the code would be clumsy).
+ throw:'xor' ->
failed;
- %% The block does not contain a boolean expression,
- %% but only a call to a guard BIF.
- %% For instance: ... when element(1, T) ->
- throw:not_boolean_expr ->
+ %% The block does not contain a boolean expression,
+ %% but only a call to a guard BIF.
+ %% For instance: ... when element(1, T) ->
+ throw:not_boolean_expr ->
failed;
- %% The block contains a 'move' instruction that could
- %% not be handled.
- throw:move ->
+ %% The block contains a 'move' instruction that could
+ %% not be handled.
+ throw:move ->
failed;
- %% The optimization is not safe. (A register
- %% used by the instructions following the
- %% optimized code is either not assigned a
- %% value at all or assigned a different value.)
- throw:all_registers_not_killed ->
+ %% The optimization is not safe. (A register
+ %% used by the instructions following the
+ %% optimized code is either not assigned a
+ %% value at all or assigned a different value.)
+ throw:all_registers_not_killed ->
failed;
- throw:registers_used ->
+ throw:registers_used ->
failed;
- %% A protected block refered to the value
- %% returned by another protected block,
- %% probably because the Core Erlang code
- %% used nested try/catches in the guard.
- %% (v3_core never produces nested try/catches
- %% in guards, so it must have been another
- %% Core Erlang translator.)
- throw:protected_violation ->
+ %% A protected block referred to the value
+ %% returned by another protected block,
+ %% probably because the Core Erlang code
+ %% used nested try/catches in the guard.
+ %% (v3_core never produces nested try/catches
+ %% in guards, so it must have been another
+ %% Core Erlang translator.)
+ throw:protected_violation ->
+ failed;
+
+ %% Failed to work out the live registers for a GC
+ %% BIF. For example, if the number of live registers
+ %% needed to be 4 because {x,3} was a source register,
+ %% but {x,2} was not known to be initialized, this
+ %% exception would be thrown.
+ throw:gc_bif_alloc_failure ->
failed
+
end
end.
@@ -665,10 +674,16 @@ put_reg_1(V, [], I) -> [{I,V}].
fetch_reg(V, [{I,V}|_]) -> {x,I};
fetch_reg(V, [_|SRs]) -> fetch_reg(V, SRs).
-live_regs(Regs) ->
- foldl(fun ({I,_}, _) ->
- I
- end, -1, Regs)+1.
+live_regs([{_,reserved}|_]) ->
+ %% We are not sure that this register is initialized, so we must
+ %% abort the optimization.
+ throw(gc_bif_alloc_failure);
+live_regs([{I,_}]) ->
+ I+1;
+live_regs([{_,_}|Regs]) ->
+ live_regs(Regs);
+live_regs([]) ->
+ 0.
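A minimal sketch of the new live_regs/1 behaviour (var_a and var_b are hypothetical placeholders for whatever put_reg stored): the list is scanned left to right and the result is the last index plus one, unless a 'reserved' slot is seen first, in which case the optimization is abandoned via the new exception:

    live_regs([{0,var_a},{1,var_b}])    -> 2
    live_regs([{0,var_a},{1,reserved}]) -> throws gc_bif_alloc_failure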
%%%
diff --git a/lib/compiler/src/beam_clean.erl b/lib/compiler/src/beam_clean.erl
index b653998252..b68b8702e0 100644
--- a/lib/compiler/src/beam_clean.erl
+++ b/lib/compiler/src/beam_clean.erl
@@ -234,31 +234,6 @@ replace([{bs_init,{f,Lbl},Info,Live,Ss,Dst}|Is], Acc, D) when Lbl =/= 0 ->
replace(Is, [{bs_init,{f,label(Lbl, D)},Info,Live,Ss,Dst}|Acc], D);
replace([{bs_put,{f,Lbl},Info,Ss}|Is], Acc, D) when Lbl =/= 0 ->
replace(Is, [{bs_put,{f,label(Lbl, D)},Info,Ss}|Acc], D);
-replace([{bs_init2,{f,Lbl},Sz,Words,R,F,Dst}|Is], Acc, D) when Lbl =/= 0 ->
- replace(Is, [{bs_init2,{f,label(Lbl, D)},Sz,Words,R,F,Dst}|Acc], D);
-replace([{bs_init_bits,{f,Lbl},Sz,Words,R,F,Dst}|Is], Acc, D) when Lbl =/= 0 ->
- replace(Is, [{bs_init_bits,{f,label(Lbl, D)},Sz,Words,R,F,Dst}|Acc], D);
-replace([{bs_put_integer,{f,Lbl},Bits,Unit,Fl,Val}|Is], Acc, D) when Lbl =/= 0 ->
- replace(Is, [{bs_put_integer,{f,label(Lbl, D)},Bits,Unit,Fl,Val}|Acc], D);
-replace([{bs_put_utf8=I,{f,Lbl},Fl,Val}|Is], Acc, D) when Lbl =/= 0 ->
- replace(Is, [{I,{f,label(Lbl, D)},Fl,Val}|Acc], D);
-replace([{bs_put_utf16=I,{f,Lbl},Fl,Val}|Is], Acc, D) when Lbl =/= 0 ->
- replace(Is, [{I,{f,label(Lbl, D)},Fl,Val}|Acc], D);
-replace([{bs_put_utf32=I,{f,Lbl},Fl,Val}|Is], Acc, D) when Lbl =/= 0 ->
- replace(Is, [{I,{f,label(Lbl, D)},Fl,Val}|Acc], D);
-replace([{bs_put_binary,{f,Lbl},Bits,Unit,Fl,Val}|Is], Acc, D) when Lbl =/= 0 ->
- replace(Is, [{bs_put_binary,{f,label(Lbl, D)},Bits,Unit,Fl,Val}|Acc], D);
-replace([{bs_put_float,{f,Lbl},Bits,Unit,Fl,Val}|Is], Acc, D) when Lbl =/= 0 ->
- replace(Is, [{bs_put_float,{f,label(Lbl, D)},Bits,Unit,Fl,Val}|Acc], D);
-replace([{bs_add,{f,Lbl},Src,Dst}|Is], Acc, D) when Lbl =/= 0 ->
- replace(Is, [{bs_add,{f,label(Lbl, D)},Src,Dst}|Acc], D);
-replace([{bs_append,{f,Lbl},_,_,_,_,_,_,_}=I0|Is], Acc, D) when Lbl =/= 0 ->
- I = setelement(2, I0, {f,label(Lbl, D)}),
- replace(Is, [I|Acc], D);
-replace([{bs_utf8_size=I,{f,Lbl},Src,Dst}|Is], Acc, D) when Lbl =/= 0 ->
- replace(Is, [{I,{f,label(Lbl, D)},Src,Dst}|Acc], D);
-replace([{bs_utf16_size=I,{f,Lbl},Src,Dst}|Is], Acc, D) when Lbl =/= 0 ->
- replace(Is, [{I,{f,label(Lbl, D)},Src,Dst}|Acc], D);
replace([{put_map=I,{f,Lbl},Op,Src,Dst,Live,List}|Is], Acc, D)
when Lbl =/= 0 ->
replace(Is, [{I,{f,label(Lbl, D)},Op,Src,Dst,Live,List}|Acc], D);
diff --git a/lib/compiler/src/beam_dead.erl b/lib/compiler/src/beam_dead.erl
index b15adfa889..7cd07dc3be 100644
--- a/lib/compiler/src/beam_dead.erl
+++ b/lib/compiler/src/beam_dead.erl
@@ -21,112 +21,10 @@
-export([module/2]).
-%%% The following optimisations are done:
-%%%
-%%% (1) In this code
-%%%
-%%% move DeadValue {x,0}
-%%% jump L2
-%%% .
-%%% .
-%%% .
-%%% L2: move Anything {x,0}
-%%% .
-%%% .
-%%% .
-%%%
-%%% the first assignment to {x,0} has no effect (is dead),
-%%% so it can be removed. Besides removing a move instruction,
-%%% if the move was preceeded by a label, the resulting code
-%%% will look this
-%%%
-%%% L1: jump L2
-%%% .
-%%% .
-%%% .
-%%% L2: move Anything {x,0}
-%%% .
-%%% .
-%%% .
-%%%
-%%% which can be further optimized by the jump optimizer (beam_jump).
-%%%
-%%% (2) In this code
-%%%
-%%% L1: move AtomLiteral {x,0}
-%%% jump L2
-%%% .
-%%% .
-%%% .
-%%% L2: test is_atom FailLabel {x,0}
-%%% select_val {x,0}, FailLabel [... AtomLiteral => L3...]
-%%% .
-%%% .
-%%% .
-%%% L3: ...
-%%%
-%%% FailLabel: ...
-%%%
-%%% the first code fragment can be changed to
-%%%
-%%% L1: move AtomLiteral {x,0}
-%%% jump L3
-%%%
-%%% If the literal is not included in the table of literals in the
-%%% select_val instruction, the first code fragment will instead be
-%%% rewritten as:
-%%%
-%%% L1: move AtomLiteral {x,0}
-%%% jump FailLabel
-%%%
-%%% The move instruction will be removed by optimization (1) above,
-%%% if the code following the L3 label overwrites {x,0}.
-%%%
-%%% The code following the L2 label will be kept, but it will be removed later
-%%% by the jump optimizer.
-%%%
-%%% (3) In this code
-%%%
-%%% test is_eq_exact ALabel Src Dst
-%%% move Src Dst
-%%%
-%%% the move instruction can be removed.
-%%% Same thing for
-%%%
-%%% test is_nil ALabel Dst
-%%% move [] Dst
-%%%
-%%%
-%%% (4) In this code
-%%%
-%%% select_val {x,Reg}, ALabel [... Literal => L1...]
-%%% .
-%%% .
-%%% .
-%%% L1: move Literal {x,Reg}
-%%%
-%%% we can remove the move instruction.
-%%%
-%%% (5) In the following code
-%%%
-%%% bif '=:=' Fail Src1 Src2 {x,0}
-%%% jump L1
-%%% .
-%%% .
-%%% .
-%%% L1: select_val {x,0}, ALabel [... true => L2..., ...false => L3...]
-%%% .
-%%% .
-%%% .
-%%% L2: .... L3: ....
-%%%
-%%% the first two instructions can be replaced with
-%%%
-%%% test is_eq_exact L3 Src1 Src2
-%%% jump L2
-%%%
-%%% provided that {x,0} is killed at both L2 and L3.
-%%%
+%%% Dead code is code that is executed but has no effect. This
+%%% optimization pass either removes dead code or jumps around it,
+%%% potentially making it unreachable and a target for the
+%%% the beam_jump pass.
-import(lists, [mapfoldl/3,reverse/1]).
@@ -173,7 +71,28 @@ move_move_into_block([I|Is], Acc) ->
move_move_into_block([], Acc) -> reverse(Acc).
%%%
-%%% Scan instructions in execution order and remove dead code.
+%%% Scan instructions in execution order and remove redundant 'move'
+%%% instructions. 'move' instructions are redundant if we know that
+%%% the register already contains the value being assigned, as in the
+%%% following code:
+%%%
+%%% test is_eq_exact SomeLabel Src Dst
+%%% move Src Dst
+%%%
+%%% or in:
+%%%
+%%% test is_nil SomeLabel Dst
+%%% move nil Dst
+%%%
+%%% or in:
+%%%
+%%% select_val Register FailLabel [... Literal => L1...]
+%%% .
+%%% .
+%%% .
+%%% L1: move Literal Register
+%%%
+%%% Also add extra labels to help the second backward pass.
%%%
forward(Is, Lc) ->
@@ -215,15 +134,13 @@ forward([{test,is_eq_exact,_,[Dst,Src]}=I,{move,Src,Dst}|Is], D, Lc, Acc) ->
forward([I|Is], D, Lc, Acc);
forward([{test,is_nil,_,[Dst]}=I,{move,nil,Dst}|Is], D, Lc, Acc) ->
forward([I|Is], D, Lc, Acc);
-forward([{test,is_eq_exact,_,_}=I|Is], D, Lc, Acc) ->
- case Is of
- [{label,_}|_] -> forward(Is, D, Lc, [I|Acc]);
- _ -> forward(Is, D, Lc+1, [{label,Lc},I|Acc])
- end;
-forward([{test,is_ne_exact,_,_}=I|Is], D, Lc, Acc) ->
- case Is of
- [{label,_}|_] -> forward(Is, D, Lc, [I|Acc]);
- _ -> forward(Is, D, Lc+1, [{label,Lc},I|Acc])
+forward([{test,_,_,_}=I|Is]=Is0, D, Lc, Acc) ->
+ %% Help the second, backward pass by inserting labels after
+ %% relational operators so that they can be skipped if they are
+ %% known to be true.
+ case useful_to_insert_label(Is0) of
+ false -> forward(Is, D, Lc, [I|Acc]);
+ true -> forward(Is, D, Lc+1, [{label,Lc},I|Acc])
end;
forward([I|Is], D, Lc, Acc) ->
forward(Is, D, Lc, [I|Acc]);
@@ -239,9 +156,49 @@ update_value_dict([Lit,{f,Lbl}|T], Reg, D0) ->
update_value_dict(T, Reg, D);
update_value_dict([], _, D) -> D.
+useful_to_insert_label([_,{label,_}|_]) ->
+ false;
+useful_to_insert_label([{test,Op,_,_}|_]) ->
+ case Op of
+ is_lt -> true;
+ is_ge -> true;
+ is_eq_exact -> true;
+ is_ne_exact -> true;
+ _ -> false
+ end.
+
+%%%
+%%% Scan instructions in reverse execution order and try to
+%%% shortcut branch instructions.
+%%%
+%%% For example, in this code:
+%%%
+%%% move Literal Register
+%%% jump L1
+%%% .
+%%% .
+%%% .
+%%% L1: test is_{integer,atom} FailLabel Register
+%%% select_val {x,0} FailLabel [... Literal => L2...]
+%%% .
+%%% .
+%%% .
+%%% L2: ...
%%%
-%%% Scan instructions in reverse execution order and remove dead code.
+%%% the 'select_val' instruction will always transfer control to L2,
+%%% so we can just as well jump to L2 directly by rewriting the
+%%% first part of the sequence like this:
%%%
+%%% move Literal Register
+%%% jump L2
+%%%
+%%% If register Register is killed at label L2, we can remove the
+%%% 'move' instruction, leaving just the 'jump' instruction:
+%%%
+%%% jump L2
+%%%
+%%% These transformations may leave parts of the code unreachable.
+%%% The beam_jump pass will remove the unreachable code.
backward(Is, D) ->
backward(Is, D, []).
@@ -277,15 +234,10 @@ backward([{select,select_val,Reg,{f,Fail0},List0}|Is], D, Acc) ->
Fail = shortcut_bs_test(Fail1, Is, D),
Sel = {select,select_val,Reg,{f,Fail},List},
backward(Is, D, [Sel|Acc]);
-backward([{jump,{f,To0}},{move,Src,Reg}=Move0|Is], D, Acc) ->
- {To,Move} = case Src of
- {atom,Val0} ->
- To1 = shortcut_select_label(To0, Reg, Val0, D),
- {To2,Val} = shortcut_boolean_label(To1, Reg, Val0, D),
- {To2,{move,{atom,Val},Reg}};
- _ ->
- {shortcut_label(To0, D),Move0}
- end,
+backward([{jump,{f,To0}},{move,Src0,Reg}|Is], D, Acc) ->
+ To1 = shortcut_select_label(To0, Reg, Src0, D),
+ {To,Src} = shortcut_boolean_label(To1, Reg, Src0, D),
+ Move = {move,Src,Reg},
Jump = {jump,{f,To}},
case beam_utils:is_killed_at(Reg, To, D) of
false -> backward([Move|Is], D, [Jump|Acc]);
@@ -301,28 +253,25 @@ backward([{test,bs_start_match2,{f,To0},Live,[Src|_]=Info,Dst}|Is], D, Acc) ->
To = shortcut_bs_start_match(To0, Src, D),
I = {test,bs_start_match2,{f,To},Live,Info,Dst},
backward(Is, D, [I|Acc]);
-backward([{test,is_eq_exact,{f,To0},[Reg,{atom,Val}]=Ops}|Is], D, Acc) ->
- To1 = shortcut_bs_test(To0, Is, D),
- To = shortcut_fail_label(To1, Reg, Val, D),
- I = combine_eqs(To, Ops, D, Acc),
- backward(Is, D, [I|Acc]);
backward([{test,Op,{f,To0},Ops0}|Is], D, Acc) ->
To1 = shortcut_bs_test(To0, Is, D),
To2 = shortcut_label(To1, D),
+ To3 = shortcut_rel_op(To2, Op, Ops0, D),
+
%% Try to shortcut a repeated test:
%%
%% test Op {f,Fail1} Operands test Op {f,Fail2} Operands
%% . . . ==> ...
%% Fail1: test Op {f,Fail2} Operands Fail1: test Op {f,Fail2} Operands
%%
- To = case beam_utils:code_at(To2, D) of
- [{test,Op,{f,To3},Ops}|_] ->
+ To = case beam_utils:code_at(To3, D) of
+ [{test,Op,{f,To4},Ops}|_] ->
case equal_ops(Ops0, Ops) of
- true -> To3;
- false -> To2
+ true -> To4;
+ false -> To3
end;
_Code ->
- To2
+ To3
end,
I = case Op of
is_eq_exact -> combine_eqs(To, Ops0, D, Acc);
@@ -367,8 +316,8 @@ equal_ops([Op|T0], [Op|T1]) ->
equal_ops([], []) -> true;
equal_ops(_, _) -> false.
-shortcut_select_list([{_,Val}=Lit,{f,To0}|T], Reg, D, Acc) ->
- To = shortcut_select_label(To0, Reg, Val, D),
+shortcut_select_list([Lit,{f,To0}|T], Reg, D, Acc) ->
+ To = shortcut_select_label(To0, Reg, Lit, D),
shortcut_select_list(T, Reg, D, [{f,To},Lit|Acc]);
shortcut_select_list([], _, _, Acc) -> reverse(Acc).
@@ -378,58 +327,39 @@ shortcut_label(To0, D) ->
_ -> To0
end.
-shortcut_select_label(To0, Reg, Val, D) ->
- case beam_utils:code_at(To0, D) of
- [{jump,{f,To}}|_] ->
- shortcut_select_label(To, Reg, Val, D);
- [{test,is_atom,_,[Reg]},{select,select_val,Reg,{f,Fail},Map}|_] ->
- To = find_select_val(Map, Val, Fail),
- shortcut_select_label(To, Reg, Val, D);
- [{test,is_eq_exact,{f,_},[Reg,{atom,Val}]},{label,To}|_] when is_atom(Val) ->
- shortcut_select_label(To, Reg, Val, D);
- [{test,is_eq_exact,{f,_},[Reg,{atom,Val}]},{jump,{f,To}}|_] when is_atom(Val) ->
- shortcut_select_label(To, Reg, Val, D);
- [{test,is_eq_exact,{f,To},[Reg,{atom,AnotherVal}]}|_]
- when is_atom(Val), Val =/= AnotherVal ->
- shortcut_select_label(To, Reg, Val, D);
- [{test,is_ne_exact,{f,To},[Reg,{atom,Val}]}|_] when is_atom(Val) ->
- shortcut_select_label(To, Reg, Val, D);
- [{test,is_ne_exact,{f,_},[Reg,{atom,_}]},{label,To}|_] when is_atom(Val) ->
- shortcut_select_label(To, Reg, Val, D);
- [{test,is_tuple,{f,To},[Reg]}|_] when is_atom(Val) ->
- shortcut_select_label(To, Reg, Val, D);
- _ ->
- To0
- end.
+shortcut_select_label(To, Reg, Lit, D) ->
+ shortcut_rel_op(To, is_ne_exact, [Reg,Lit], D).
-shortcut_fail_label(To0, Reg, Val, D) ->
- case beam_utils:code_at(To0, D) of
- [{jump,{f,To}}|_] ->
- shortcut_fail_label(To, Reg, Val, D);
- [{test,is_eq_exact,{f,To},[Reg,{atom,Val}]}|_] when is_atom(Val) ->
- shortcut_fail_label(To, Reg, Val, D);
- _ ->
- To0
- end.
-
-shortcut_boolean_label(To0, Reg, Bool0, D) when is_boolean(Bool0) ->
+shortcut_boolean_label(To0, Reg, {atom,Bool0}=Lit, D) when is_boolean(Bool0) ->
case beam_utils:code_at(To0, D) of
[{line,_},{bif,'not',_,[Reg],Reg},{jump,{f,To}}|_] ->
- Bool = not Bool0,
+ Bool = {atom,not Bool0},
{shortcut_select_label(To, Reg, Bool, D),Bool};
_ ->
- {To0,Bool0}
+ {To0,Lit}
end;
shortcut_boolean_label(To, _, Bool, _) -> {To,Bool}.
-find_select_val([{_,Val},{f,To}|_], Val, _) -> To;
-find_select_val([{_,_}, {f,_}|T], Val, Fail) ->
- find_select_val(T, Val, Fail);
-find_select_val([], _, Fail) -> Fail.
+%% Replace a comparison operator with a test instruction and a jump.
+%% For example, if we have this code:
+%%
+%% bif '=:=' Fail Src1 Src2 {x,0}
+%% jump L1
+%% .
+%% .
+%% .
+%% L1: select_val {x,0} FailLabel [... true => L2..., ...false => L3...]
+%%
+%% the first two instructions can be replaced with
+%%
+%% test is_eq_exact L3 Src1 Src2
+%% jump L2
+%%
+%% provided that {x,0} is killed at both L2 and L3.
replace_comp_op(To, Reg, Op, Ops, D) ->
- False = comp_op_find_shortcut(To, Reg, false, D),
- True = comp_op_find_shortcut(To, Reg, true, D),
+ False = comp_op_find_shortcut(To, Reg, {atom,false}, D),
+ True = comp_op_find_shortcut(To, Reg, {atom,true}, D),
[bif_to_test(Op, Ops, False),{jump,{f,True}}].
comp_op_find_shortcut(To0, Reg, Val, D) ->
@@ -461,9 +391,9 @@ not_possible() -> throw(not_possible).
%%
%% is_eq_exact F1 Reg Lit1 select_val Reg F2 [ Lit1 L1
%% L1: . Lit2 L2 ]
-%% .
-%% . ==>
-%% .
+%% .
+%% . ==>
+%% .
%% F1: is_eq_exact F2 Reg Lit2 F1: is_eq_exact F2 Reg Lit2
%% L2: .... L2:
%%
@@ -488,31 +418,26 @@ remove_from_list(Lit, [Val,{f,_}=Fail|T]) ->
[Val,Fail|remove_from_list(Lit, T)];
remove_from_list(_, []) -> [].
-%% shortcut_bs_test(TargetLabel, [Instruction], D) -> TargetLabel'
-%% Try to shortcut the failure label for a bit syntax matching.
-%% We know that the binary contains at least Bits bits after
-%% the latest save point.
+%% shortcut_bs_test(TargetLabel, ReversedInstructions, D) -> TargetLabel'
+%% Try to shortcut the failure label for bit syntax matching.
shortcut_bs_test(To, Is, D) ->
shortcut_bs_test_1(beam_utils:code_at(To, D), Is, To, D).
-shortcut_bs_test_1([{bs_restore2,Reg,SavePoint}|Is], PrevIs, To, D) ->
- shortcut_bs_test_2(Is, {Reg,SavePoint}, PrevIs, To, D);
-shortcut_bs_test_1([_|_], _, To, _) -> To.
-
-shortcut_bs_test_2([{label,_}|Is], Save, PrevIs, To, D) ->
- shortcut_bs_test_2(Is, Save, PrevIs, To, D);
-shortcut_bs_test_2([{test,bs_test_tail2,{f,To},[_,TailBits]}|_],
- {Reg,_Point} = RP, PrevIs, To0, D) ->
- case count_bits_matched(PrevIs, RP, 0) of
+shortcut_bs_test_1([{bs_restore2,Reg,SavePoint},
+ {label,_},
+ {test,bs_test_tail2,{f,To},[_,TailBits]}|_],
+ PrevIs, To0, D) ->
+ case count_bits_matched(PrevIs, {Reg,SavePoint}, 0) of
Bits when Bits > TailBits ->
%% This instruction will fail. We know because a restore has been
- %% done from the previous point SavePoint in the binary, and we also know
- %% that the binary contains at least Bits bits from SavePoint.
+ %% done from the previous point SavePoint in the binary, and we
+ %% also know that the binary contains at least Bits bits from
+ %% SavePoint.
%%
%% Since we will skip a bs_restore2 if we shortcut to label To,
- %% we must now make sure that code at To does not depend on the position
- %% in the context in any way.
+ %% we must now make sure that code at To does not depend on
+ %% the position in the context in any way.
case shortcut_bs_pos_used(To, Reg, D) of
false -> To;
true -> To0
@@ -520,8 +445,19 @@ shortcut_bs_test_2([{test,bs_test_tail2,{f,To},[_,TailBits]}|_],
_Bits ->
To0
end;
-shortcut_bs_test_2([_|_], _, _, To, _) -> To.
+shortcut_bs_test_1([_|_], _, To, _) -> To.
+%% count_bits_matched(ReversedInstructions, SavePoint, Bits) -> Bits'
+%% Given a reversed instruction stream, determine the minimum number
+%% of bits that will be matched by bit syntax instructions up to the
+%% given save point.
+
+count_bits_matched([{test,bs_get_utf8,{f,_},_,_,_}|Is], SavePoint, Bits) ->
+ count_bits_matched(Is, SavePoint, Bits+8);
+count_bits_matched([{test,bs_get_utf16,{f,_},_,_,_}|Is], SavePoint, Bits) ->
+ count_bits_matched(Is, SavePoint, Bits+16);
+count_bits_matched([{test,bs_get_utf32,{f,_},_,_,_}|Is], SavePoint, Bits) ->
+ count_bits_matched(Is, SavePoint, Bits+32);
count_bits_matched([{test,_,_,_,[_,Sz,U,{field_flags,_}],_}|Is], SavePoint, Bits) ->
case Sz of
{integer,N} -> count_bits_matched(Is, SavePoint, Bits+N*U);
@@ -545,20 +481,332 @@ shortcut_bs_pos_used_1(Is, Reg, D) ->
not beam_utils:is_killed(Reg, Is, D).
%% shortcut_bs_start_match(TargetLabel, Reg) -> TargetLabel
-%% A failing bs_start_match2 instruction means that the source
-%% cannot be a binary, so there is no need to jump bs_context_to_binary/1
-%% or another bs_start_match2 instruction.
+%% A failing bs_start_match2 instruction means that the source (Reg)
+%% cannot be a binary. That means that it is safe to skip
+%% bs_context_to_binary instructions operating on Reg, and
+%% bs_start_match2 instructions operating on Reg.
shortcut_bs_start_match(To, Reg, D) ->
- shortcut_bs_start_match_1(beam_utils:code_at(To, D), Reg, To).
+ shortcut_bs_start_match_1(beam_utils:code_at(To, D), Reg, To, D).
+
+shortcut_bs_start_match_1([{bs_context_to_binary,Reg}|Is], Reg, To, D) ->
+ shortcut_bs_start_match_1(Is, Reg, To, D);
+shortcut_bs_start_match_1([{jump,{f,To}}|_], Reg, _, D) ->
+ Code = beam_utils:code_at(To, D),
+ shortcut_bs_start_match_1(Code, Reg, To, D);
+shortcut_bs_start_match_1([{test,bs_start_match2,{f,To},_,[Reg|_],_}|_],
+ Reg, _, D) ->
+ Code = beam_utils:code_at(To, D),
+ shortcut_bs_start_match_1(Code, Reg, To, D);
+shortcut_bs_start_match_1(_, _, To, _) ->
+ To.
-shortcut_bs_start_match_1([{bs_context_to_binary,Reg}|Is], Reg, To) ->
- shortcut_bs_start_match_2(Is, Reg, To);
-shortcut_bs_start_match_1(_, _, To) -> To.
+%% shortcut_rel_op(FailLabel, Operator, [Operand], D) -> FailLabel'
+%% Try to shortcut the given test instruction. Example:
+%%
+%% is_ge L1 {x,0} 48
+%% .
+%% .
+%% .
+%% L1: is_ge L2 {x,0} 65
+%%
+%% The first test instruction can be rewritten to "is_ge L2 {x,0} 48"
+%% since the instruction at L1 will also fail.
+%%
+%% If there are instructions between L1 and the other test instruction
+%% it may still be possible to do the shortcut. For example:
+%%
+%% L1: is_eq_exact L3 {x,0} 92
+%% is_ge L2 {x,0} 65
+%%
+%% Since the first test instruction failed, we know that {x,0} must
+%% be less than 48; therefore, we know that {x,0} cannot be equal to
+%% 92 and the jump to L3 cannot happen.
+
+shortcut_rel_op(To, Op, Ops, D) ->
+ case normalize_op({test,Op,{f,To},Ops}) of
+ {{NormOp,A,B},_} ->
+ Normalized = {negate_op(NormOp),A,B},
+ shortcut_rel_op_fp(To, Normalized, D);
+ {_,_} ->
+ To;
+ error ->
+ To
+ end.
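A minimal worked example of the normalization step, using the is_ge instruction from the comment above (L1 is a placeholder label):

    normalize_op({test,is_ge,{f,L1},[{x,0},{integer,48}]})
        -> {{'>=',{x,0},{literal,48}},L1}
    negate_op('>=') -> '<'

so the condition handed to shortcut_rel_op_fp/3 is {'<',{x,0},{literal,48}}, i.e. the fact known to hold whenever execution reaches the failure label L1.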
-shortcut_bs_start_match_2([{jump,{f,To}}|_], _, _) ->
- To;
-shortcut_bs_start_match_2([{test,bs_start_match2,{f,To},_,[Reg|_],_}|_], Reg, _) ->
- To;
-shortcut_bs_start_match_2(_Is, _Reg, To) ->
- To.
+shortcut_rel_op_fp(To0, Normalized, D) ->
+ Code = beam_utils:code_at(To0, D),
+ case shortcut_any_label(Code, Normalized) of
+ error ->
+ To0;
+ To ->
+ shortcut_rel_op_fp(To, Normalized, D)
+ end.
+
+%% shortcut_any_label([Instruction], PrevCondition) -> FailLabel | error
+%% Using PrevCondition (a previous condition known to be true),
+%% try to shortcut to another failure label.
+
+shortcut_any_label([{jump,{f,Lbl}}|_], _Prev) ->
+ Lbl;
+shortcut_any_label([{label,Lbl}|_], _Prev) ->
+ Lbl;
+shortcut_any_label([{select,select_val,R,{f,Fail},L}|_], Prev) ->
+ shortcut_selectval(L, R, Fail, Prev);
+shortcut_any_label([I|Is], Prev) ->
+ case normalize_op(I) of
+ error ->
+ error;
+ {Normalized,Fail} ->
+ %% We have a relational operator.
+ case will_succeed(Prev, Normalized) of
+ no ->
+ %% This test instruction will always branch
+ %% to Fail.
+ Fail;
+ yes ->
+ %% This test instruction will never branch,
+ %% so we will look at the next instruction.
+ shortcut_any_label(Is, Prev);
+ maybe ->
+ %% May or may not branch. From now on, we can only
+ %% shortcut to this specific failure label
+ %% Fail.
+ shortcut_specific_label(Is, Fail, Prev)
+ end
+ end.
+
+%% shortcut_specific_label([Instruction], FailLabel, PrevCondition) ->
+%% FailLabel | error
+%% We have previously encountered a test instruction that may or
+%% may not branch to FailLabel. Therefore we are only allowed
+%% to do the shortcut to the same fail label (FailLabel).
+
+shortcut_specific_label([{label,_}|Is], Fail, Prev) ->
+ shortcut_specific_label(Is, Fail, Prev);
+shortcut_specific_label([{select,select_val,R,{f,F},L}|_], Fail, Prev) ->
+ case shortcut_selectval(L, R, F, Prev) of
+ Fail -> Fail;
+ _ -> error
+ end;
+shortcut_specific_label([I|Is], Fail, Prev) ->
+ case normalize_op(I) of
+ error ->
+ error;
+ {Normalized,Fail} ->
+ case will_succeed(Prev, Normalized) of
+ no ->
+ %% Will branch to FailLabel.
+ Fail;
+ yes ->
+ %% Will definitely never branch.
+ shortcut_specific_label(Is, Fail, Prev);
+ maybe ->
+ %% May branch, but still OK since it will branch
+ %% to FailLabel.
+ shortcut_specific_label(Is, Fail, Prev)
+ end;
+ {Normalized,_} ->
+ %% This test instruction will branch to a different
+ %% fail label, if it branches at all.
+ case will_succeed(Prev, Normalized) of
+ yes ->
+ %% Still OK, since the branch will never be
+ %% taken.
+ shortcut_specific_label(Is, Fail, Prev);
+ no ->
+ %% Give up. The branch will definitely be taken
+ %% to a different fail label.
+ error;
+ maybe ->
+ %% Give up. If the branch is taken, it will be
+ %% to a different fail label.
+ error
+ end
+ end.
+
+
+%% shortcut_selectval(List, Reg, Fail, PrevCond) -> FailLabel | error
+%% Try to shortcut a selectval instruction. A selectval instruction
+%% is equivalent to the following instruction sequence:
+%%
+%% is_ne_exact L1 Reg Value1
+%% .
+%% .
+%% .
+%% is_ne_exact LN Reg ValueN
+%% jump DefaultFailLabel
+%%
+shortcut_selectval([Val,{f,Lbl}|T], R, Fail, Prev) ->
+ case will_succeed(Prev, {'=/=',R,get_literal(Val)}) of
+ yes -> shortcut_selectval(T, R, Fail, Prev);
+ no -> Lbl;
+ maybe -> error
+ end;
+shortcut_selectval([], _, Fail, _) -> Fail.
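A minimal sketch with hypothetical labels L1..L3: if the previous condition says that {x,0} is known to hold the atom b, the equivalent chain of is_ne_exact tests resolves the select_val statically:

    shortcut_selectval([{atom,a},{f,L1},{atom,b},{f,L2},{atom,c},{f,L3}],
                       {x,0}, Fail, {'=:=',{x,0},{literal,b}}) -> L2

The a entry is skipped because {x,0} =/= a must succeed, and the b entry yields its label because {x,0} =/= b must fail.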
+
+%% will_succeed(PrevCondition, Condition) -> yes | no | maybe
+%% PrevCondition is a condition known to be true. This function
+%% will tell whether Condition will succeed.
+
+will_succeed({Op1,Reg,A}, {Op2,Reg,B}) ->
+ will_succeed_1(Op1, A, Op2, B);
+will_succeed({'=:=',Reg,{literal,A}}, {TypeTest,Reg}) ->
+ case erlang:TypeTest(A) of
+ false -> no;
+ true -> yes
+ end;
+will_succeed({_,_,_}, maybe) ->
+ maybe;
+will_succeed({_,_,_}, Test) when is_tuple(Test) ->
+ maybe.
+
+will_succeed_1('=:=', A, '<', B) ->
+ if
+ B =< A -> no;
+ true -> yes
+ end;
+will_succeed_1('=:=', A, '=<', B) ->
+ if
+ B < A -> no;
+ true -> yes
+ end;
+will_succeed_1('=:=', A, '=:=', B) ->
+ if
+ A =:= B -> yes;
+ true -> no
+ end;
+will_succeed_1('=:=', A, '=/=', B) ->
+ if
+ A =:= B -> no;
+ true -> yes
+ end;
+will_succeed_1('=:=', A, '>=', B) ->
+ if
+ B > A -> no;
+ true -> yes
+ end;
+will_succeed_1('=:=', A, '>', B) ->
+ if
+ B >= A -> no;
+ true -> yes
+ end;
+
+will_succeed_1('=/=', A, '=/=', B) when A =:= B -> yes;
+will_succeed_1('=/=', A, '=:=', B) when A =:= B -> no;
+
+will_succeed_1('<', A, '=:=', B) when B >= A -> no;
+will_succeed_1('<', A, '=/=', B) when B >= A -> yes;
+will_succeed_1('<', A, '<', B) when B >= A -> yes;
+will_succeed_1('<', A, '=<', B) when B > A -> yes;
+will_succeed_1('<', A, '>=', B) when B > A -> no;
+will_succeed_1('<', A, '>', B) when B >= A -> no;
+
+will_succeed_1('=<', A, '=:=', B) when B > A -> no;
+will_succeed_1('=<', A, '=/=', B) when B > A -> yes;
+will_succeed_1('=<', A, '<', B) when B > A -> yes;
+will_succeed_1('=<', A, '=<', B) when B >= A -> yes;
+will_succeed_1('=<', A, '>=', B) when B > A -> no;
+will_succeed_1('=<', A, '>', B) when B >= A -> no;
+
+will_succeed_1('>=', A, '=:=', B) when B < A -> no;
+will_succeed_1('>=', A, '=/=', B) when B < A -> yes;
+will_succeed_1('>=', A, '<', B) when B =< A -> no;
+will_succeed_1('>=', A, '=<', B) when B < A -> no;
+will_succeed_1('>=', A, '>=', B) when B =< A -> yes;
+will_succeed_1('>=', A, '>', B) when B < A -> yes;
+
+will_succeed_1('>', A, '=:=', B) when B =< A -> no;
+will_succeed_1('>', A, '=/=', B) when B =< A -> yes;
+will_succeed_1('>', A, '<', B) when B =< A -> no;
+will_succeed_1('>', A, '=<', B) when B < A -> no;
+will_succeed_1('>', A, '>=', B) when B =< A -> yes;
+will_succeed_1('>', A, '>', B) when B < A -> yes;
+
+will_succeed_1(_, _, _, _) -> maybe.
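A few sample evaluations of the table above; the conditions use the {literal,Term} form produced by get_literal/1 further down:

    will_succeed({'=:=',{x,0},{literal,a}}, {'=/=',{x,0},{literal,b}})  -> yes
    will_succeed({'<',{x,0},{literal,48}},  {'=:=',{x,0},{literal,92}}) -> no
    will_succeed({'>=',{x,0},{literal,10}}, {'>=',{x,0},{literal,20}})  -> maybe

Knowing that {x,0} is at least 10 says nothing about whether it is at least 20, hence the 'maybe' in the last case.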
+
+%% normalize_op(Instruction) -> {Normalized,FailLabel} | error
+%% Normalized = {Operator,Register,Literal} |
+%% {TypeTest,Register} |
+%% maybe
+%% Operator = '<' | '=<' | '=:=' | '=/=' | '>=' | '>'
+%% TypeTest = is_atom | is_integer ...
+%% Literal = {literal,Term}
+%%
+%% Normalize a relational operator to facilitate further
+%% comparisons between operators. Always make the register
+%% operand the first operand. Thus the following instruction:
+%%
+%% {test,is_ge,{f,99},{integer,13},{x,0}}
+%%
+%% will be normalized to:
+%%
+%% {'=<',{x,0},{literal,13}}
+%%
+%% NOTE: Bit syntax test instructions are scary. They may change the
+%% state of match contexts and update registers, so we don't dare
+%% mess with them.
+
+normalize_op({test,is_ge,{f,Fail},Ops}) ->
+ normalize_op_1('>=', Ops, Fail);
+normalize_op({test,is_lt,{f,Fail},Ops}) ->
+ normalize_op_1('<', Ops, Fail);
+normalize_op({test,is_eq_exact,{f,Fail},Ops}) ->
+ normalize_op_1('=:=', Ops, Fail);
+normalize_op({test,is_ne_exact,{f,Fail},Ops}) ->
+ normalize_op_1('=/=', Ops, Fail);
+normalize_op({test,is_nil,{f,Fail},[R]}) ->
+ normalize_op_1('=:=', [R,nil], Fail);
+normalize_op({test,Op,{f,Fail},[R]}) ->
+ case erl_internal:new_type_test(Op, 1) of
+ true -> {{Op,R},Fail};
+ false -> {maybe,Fail}
+ end;
+normalize_op({test,_,{f,Fail},_}=I) ->
+ case beam_utils:is_pure_test(I) of
+ true -> {maybe,Fail};
+ false -> error
+ end;
+normalize_op(_) ->
+ error.
+
+normalize_op_1(Op, [Op1,Op2], Fail) ->
+ case {get_literal(Op1),get_literal(Op2)} of
+ {error,error} ->
+ %% Both operands are registers.
+ {maybe,Fail};
+ {error,Lit} ->
+ {{Op,Op1,Lit},Fail};
+ {Lit,error} ->
+ {{turn_op(Op),Op2,Lit},Fail};
+ {_,_} ->
+ %% Both operands are literals. Can probably only
+ %% happen if the Core Erlang optimization passes were
+ %% turned off, so don't bother trying to do something
+ %% smart here.
+ {maybe,Fail}
+ end.
+
+turn_op('<') -> '>';
+turn_op('>=') -> '=<';
+turn_op('=:='=Op) -> Op;
+turn_op('=/='=Op) -> Op.
+
+negate_op('>=') -> '<';
+negate_op('<') -> '>=';
+negate_op('=<') -> '>';
+negate_op('>') -> '=<';
+negate_op('=:=') -> '=/=';
+negate_op('=/=') -> '=:='.
+
+get_literal({atom,Val}) ->
+ {literal,Val};
+get_literal({integer,Val}) ->
+ {literal,Val};
+get_literal({float,Val}) ->
+ {literal,Val};
+get_literal(nil) ->
+ {literal,[]};
+get_literal({literal,_}=Lit) ->
+ Lit;
+get_literal({_,_}) -> error.
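For reference, a few straightforward instances of the clauses above, showing how the different BEAM operand encodings are folded into the single {literal,Term} form that will_succeed/2 compares:

    get_literal({atom,true}) -> {literal,true}
    get_literal(nil)         -> {literal,[]}
    get_literal({x,0})       -> error          (a register, not a literal)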
diff --git a/lib/compiler/src/beam_flatten.erl b/lib/compiler/src/beam_flatten.erl
index 05d067dc48..54e06df995 100644
--- a/lib/compiler/src/beam_flatten.erl
+++ b/lib/compiler/src/beam_flatten.erl
@@ -63,9 +63,7 @@ norm({set,[],[S,D],{set_tuple_element,I}}) -> {set_tuple_element,S,D,I};
norm({set,[D1,D2],[S],get_list}) -> {get_list,S,D1,D2};
norm({set,[D],[S|Puts],{alloc,R,{put_map,Op,F}}}) ->
{put_map,F,Op,S,D,R,{list,Puts}};
-norm({set,Ds,[S|Ss],{get_map_elements,F}}) ->
- Gets = beam_utils:joineven(Ss,Ds),
- {get_map_elements,F,S,{list,Gets}};
+%% get_map_elements is always handled in beam_split (moved out of block)
norm({set,[],[],remove_message}) -> remove_message;
norm({set,[],[],fclearerror}) -> fclearerror;
norm({set,[],[],fcheckerror}) -> {fcheckerror,{f,0}}.
diff --git a/lib/compiler/src/beam_jump.erl b/lib/compiler/src/beam_jump.erl
index b952139f2c..ba71d4efae 100644
--- a/lib/compiler/src/beam_jump.erl
+++ b/lib/compiler/src/beam_jump.erl
@@ -166,6 +166,12 @@ share_1([{label,L}=Lbl|Is], Dict0, Seq, Acc) ->
end;
share_1([{func_info,_,_,_}=I|Is], _, [], Acc) ->
reverse(Is, [I|Acc]);
+share_1([{'try',_,_}=I|Is], Dict0, Seq, Acc) ->
+ Dict = clean_non_sharable(Dict0),
+ share_1(Is, Dict, [I|Seq], Acc);
+share_1([{try_case,_}=I|Is], Dict0, Seq, Acc) ->
+ Dict = clean_non_sharable(Dict0),
+ share_1(Is, Dict, [I|Seq], Acc);
share_1([I|Is], Dict, Seq, Acc) ->
case is_unreachable_after(I) of
false ->
@@ -174,6 +180,24 @@ share_1([I|Is], Dict, Seq, Acc) ->
share_1(Is, Dict, [I], Acc)
end.
+clean_non_sharable(Dict) ->
+ %% We are passing in or out of a 'try' block. Remove
+ %% sequences that should not be shared across the boundaries
+ %% of a 'try' block. Since the end of the sequence must match,
+ %% the only possible match between a sequence outside and
+ %% a sequence inside the 'try' block is a sequence that ends
+ %% with an instruction that causes an exception. Any sequence
+ %% that causes an exception must contain a line/1 instruction.
+ dict:filter(fun(K, _V) -> sharable_with_try(K) end, Dict).
+
+sharable_with_try([{line,_}|_]) ->
+ %% This sequence may cause an exception and may potentially
+ %% match a sequence on the other side of the 'try' block
+ %% boundary.
+ false;
+sharable_with_try([_|Is]) ->
+ sharable_with_try(Is);
+sharable_with_try([]) -> true.
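A minimal sketch of which cached sequences survive clean_non_sharable/1 (the instruction sequences are hypothetical; Loc stands for whatever location annotation a line instruction carries):

    sharable_with_try([{deallocate,1},return]) -> true
    sharable_with_try([{line,Loc},{call_ext,1,{extfunc,erlang,error,1}}]) -> false

Only the second kind, which can raise an exception, risks matching a sequence on the other side of a 'try' boundary, so it is the kind dropped from the dictionary.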
%% Eliminate all fallthroughs. Return the result reversed.
@@ -295,12 +319,6 @@ opt([{test,_,{f,_}=Lbl,_,_,_}=I|Is], Acc, St) ->
opt(Is, [I|Acc], label_used(Lbl, St));
opt([{select,_,_R,Fail,Vls}=I|Is], Acc, St) ->
skip_unreachable(Is, [I|Acc], label_used([Fail|Vls], St));
-opt([{label,L}=I|Is], Acc, #st{entry=L}=St) ->
- %% NEVER move the entry label.
- opt(Is, [I|Acc], St);
-opt([{label,L1},{jump,{f,L2}}=I|Is], [Prev|Acc], St0) ->
- St = St0#st{mlbl=dict:append(L2, L1, St0#st.mlbl)},
- opt([Prev,I|Is], Acc, label_used({f,L2}, St));
opt([{label,Lbl}=I|Is], Acc, #st{mlbl=Mlbl}=St0) ->
case dict:find(Lbl, Mlbl) of
{ok,Lbls} ->
@@ -310,9 +328,20 @@ opt([{label,Lbl}=I|Is], Acc, #st{mlbl=Mlbl}=St0) ->
insert_labels([Lbl|Lbls], Is, Acc, St);
error -> opt(Is, [I|Acc], St0)
end;
-opt([{jump,{f,Lbl}},{label,Lbl}=I|Is], Acc, St) ->
- opt([I|Is], Acc, St);
-opt([{jump,Lbl}=I|Is], Acc, St) ->
+opt([{jump,{f,_}=X}|[{label,_},{jump,X}|_]=Is], Acc, St) ->
+ opt(Is, Acc, St);
+opt([{jump,{f,Lbl}}|[{label,Lbl}|_]=Is], Acc, St) ->
+ opt(Is, Acc, St);
+opt([{jump,{f,L}=Lbl}=I|Is], Acc0, #st{mlbl=Mlbl0}=St0) ->
+ %% All labels before this jump instruction should now be
+ %% moved to the location of the jump's target.
+ {Lbls,Acc} = collect_labels(Acc0, St0),
+ St = case Lbls of
+ [] -> St0;
+ [_|_] ->
+ Mlbl = dict:append_list(L, Lbls, Mlbl0),
+ St0#st{mlbl=Mlbl}
+ end,
skip_unreachable(Is, [I|Acc], label_used(Lbl, St));
%% Optimization: quickly handle some common instructions that don't
%% have any failure labels and where is_unreachable_after(I) =:= false.
@@ -349,6 +378,17 @@ insert_fc_labels([L|Ls], Mlbl, Acc0) ->
end;
insert_fc_labels([], _, Acc) -> Acc.
+collect_labels(Is, #st{entry=Entry}) ->
+ collect_labels_1(Is, Entry, []).
+
+collect_labels_1([{label,Entry}|_]=Is, Entry, Acc) ->
+ %% Never move the entry label.
+ {Acc,Is};
+collect_labels_1([{label,L}|Is], Entry, Acc) ->
+ collect_labels_1(Is, Entry, [L|Acc]);
+collect_labels_1(Is, _Entry, Acc) ->
+ {Acc,Is}.
+
%% label_defined(Is, Label) -> true | false.
%% Test whether the label Label is defined at the start of the instruction
%% sequence, possibly preceeded by other label definitions.
@@ -435,14 +475,14 @@ is_label_used_in(Lbl, Is) ->
is_label_used_in_1(Is, Lbl, gb_sets:empty()).
is_label_used_in_1([{block,Block}|Is], Lbl, Empty) ->
- lists:any(fun(I) -> is_label_used_in_2(I, Lbl) end, Block)
+ lists:any(fun(I) -> is_label_used_in_block(I, Lbl) end, Block)
orelse is_label_used_in_1(Is, Lbl, Empty);
is_label_used_in_1([I|Is], Lbl, Empty) ->
Used = ulbl(I, Empty),
gb_sets:is_member(Lbl, Used) orelse is_label_used_in_1(Is, Lbl, Empty);
is_label_used_in_1([], _, _) -> false.
-is_label_used_in_2({set,_,_,Info}, Lbl) ->
+is_label_used_in_block({set,_,_,Info}, Lbl) ->
case Info of
{bif,_,{f,F}} -> F =:= Lbl;
{alloc,_,{gc_bif,_,{f,F}}} -> F =:= Lbl;
@@ -452,7 +492,6 @@ is_label_used_in_2({set,_,_,Info}, Lbl) ->
{put_tuple,_} -> false;
{get_tuple_element,_} -> false;
{set_tuple_element,_} -> false;
- {get_map_elements,{f,F}} -> F =:= Lbl;
{line,_} -> false;
_ when is_atom(Info) -> false
end.
diff --git a/lib/compiler/src/beam_split.erl b/lib/compiler/src/beam_split.erl
index f5dba314ae..0c62b0bf3d 100644
--- a/lib/compiler/src/beam_split.erl
+++ b/lib/compiler/src/beam_split.erl
@@ -53,9 +53,8 @@ split_block([{set,[D],[S|Puts],{alloc,R,{put_map,Op,{f,Lbl}=Fail}}}|Is],
Bl, Acc) when Lbl =/= 0 ->
split_block(Is, [], [{put_map,Fail,Op,S,D,R,{list,Puts}}|
make_block(Bl, Acc)]);
-split_block([{set,Ds,[S|Ss],{get_map_elements,{f,Lbl}=Fail}}|Is], Bl, Acc)
- when Lbl =/= 0 ->
- Gets = beam_utils:joineven(Ss,Ds),
+split_block([{set,Ds,[S|Ss],{get_map_elements,Fail}}|Is], Bl, Acc) ->
+ Gets = beam_utils:join_even(Ss,Ds),
split_block(Is, [], [{get_map_elements,Fail,S,{list,Gets}}|make_block(Bl, Acc)]);
split_block([{set,[R],[],{'catch',L}}|Is], Bl, Acc) ->
split_block(Is, [], [{'catch',R,L}|make_block(Bl, Acc)]);
diff --git a/lib/compiler/src/beam_type.erl b/lib/compiler/src/beam_type.erl
index cdddad4153..d9713cef0d 100644
--- a/lib/compiler/src/beam_type.erl
+++ b/lib/compiler/src/beam_type.erl
@@ -469,6 +469,7 @@ is_math_bif(erf, 1) -> true;
is_math_bif(erfc, 1) -> true;
is_math_bif(exp, 1) -> true;
is_math_bif(log, 1) -> true;
+is_math_bif(log2, 1) -> true;
is_math_bif(log10, 1) -> true;
is_math_bif(sqrt, 1) -> true;
is_math_bif(atan2, 2) -> true;
diff --git a/lib/compiler/src/beam_utils.erl b/lib/compiler/src/beam_utils.erl
index e82ba82d38..3249024854 100644
--- a/lib/compiler/src/beam_utils.erl
+++ b/lib/compiler/src/beam_utils.erl
@@ -26,7 +26,7 @@
code_at/2,bif_to_test/3,is_pure_test/1,
live_opt/1,delete_live_annos/1,combine_heap_needs/2]).
--export([joineven/2,spliteven/1]).
+-export([join_even/2,split_even/1]).
-import(lists, [member/2,sort/1,reverse/1,splitwith/2]).
@@ -187,7 +187,7 @@ is_pure_test({test,is_lt,_,[_,_]}) -> true;
is_pure_test({test,is_nil,_,[_]}) -> true;
is_pure_test({test,is_nonempty_list,_,[_]}) -> true;
is_pure_test({test,test_arity,_,[_,_]}) -> true;
-is_pure_test({test,has_map_fields,_,[_,{list,_}]}) -> true;
+is_pure_test({test,has_map_fields,_,[_|_]}) -> true;
is_pure_test({test,Op,_,Ops}) ->
erl_internal:new_type_test(Op, length(Ops)).
@@ -758,13 +758,9 @@ live_opt([{line,_}=I|Is], Regs, D, Acc) ->
live_opt(Is, Regs, D, [I|Acc]);
%% The following instructions can occur if the "compilation" has been
-%% started from a .S file using the 'asm' option.
+%% started from a .S file using the 'from_asm' option.
live_opt([{trim,_,_}=I|Is], Regs, D, Acc) ->
live_opt(Is, Regs, D, [I|Acc]);
-live_opt([{allocate,_,Live}=I|Is], _, D, Acc) ->
- live_opt(Is, live_call(Live), D, [I|Acc]);
-live_opt([{allocate_heap,_,_,Live}=I|Is], _, D, Acc) ->
- live_opt(Is, live_call(Live), D, [I|Acc]);
live_opt([{'%',_}=I|Is], Regs, D, Acc) ->
live_opt(Is, Regs, D, [I|Acc]);
live_opt([{recv_set,_}=I|Is], Regs, D, Acc) ->
@@ -835,14 +831,14 @@ x_live([], Regs) -> Regs.
is_live(X, Regs) -> ((Regs bsr X) band 1) =:= 1.
-%% spliteven/1
+%% split_even/1
%% [1,2,3,4,5,6] -> {[1,3,5],[2,4,6]}
-spliteven(Rs) -> spliteven(Rs,[],[]).
-spliteven([],Ss,Ds) -> {reverse(Ss),reverse(Ds)};
-spliteven([S,D|Rs],Ss,Ds) ->
- spliteven(Rs,[S|Ss],[D|Ds]).
+split_even(Rs) -> split_even(Rs,[],[]).
+split_even([],Ss,Ds) -> {reverse(Ss),reverse(Ds)};
+split_even([S,D|Rs],Ss,Ds) ->
+ split_even(Rs,[S|Ss],[D|Ds]).
-%% joineven/1
+%% join_even/1
%% {[1,3,5],[2,4,6]} -> [1,2,3,4,5,6]
-joineven([],[]) -> [];
-joineven([S|Ss],[D|Ds]) -> [S,D|joineven(Ss,Ds)].
+join_even([],[]) -> [];
+join_even([S|Ss],[D|Ds]) -> [S,D|join_even(Ss,Ds)].
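Usage sketch of the renamed helpers with hypothetical lists (both functions are exported from beam_utils, so they can be tried in the shell):

    1> beam_utils:split_even([a,1,b,2,c,3]).
    {[a,b,c],[1,2,3]}
    2> beam_utils:join_even([a,b,c], [1,2,3]).
    [a,1,b,2,c,3]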
diff --git a/lib/compiler/src/beam_validator.erl b/lib/compiler/src/beam_validator.erl
index 0acc7a227f..e60184c929 100644
--- a/lib/compiler/src/beam_validator.erl
+++ b/lib/compiler/src/beam_validator.erl
@@ -22,7 +22,6 @@
%% Avoid warning for local function error/1 clashing with autoimported BIF.
-compile({no_auto_import,[error/1]}).
--export([file/1, files/1]).
%% Interface for compiler.
-export([module/2, format_error/1]).
@@ -40,38 +39,12 @@
-define(DBG_FORMAT(F, D), ok).
-endif.
-%%%
-%%% API functions.
-%%%
-
--spec file(file:filename()) -> 'ok' | {'error', term()}.
-
-file(Name) when is_list(Name) ->
- case case filename:extension(Name) of
- ".S" -> s_file(Name);
- ".beam" -> beam_file(Name)
- end of
- [] -> ok;
- Es -> {error,Es}
- end.
-
--spec files([file:filename()]) -> 'ok'.
-
-files([F|Fs]) ->
- ?DBG_FORMAT("# Verifying: ~p~n", [F]),
- case file(F) of
- ok -> ok;
- {error,Es} ->
- io:format("~tp:~n~ts~n", [F,format_error(Es)])
- end,
- files(Fs);
-files([]) -> ok.
-
%% To be called by the compiler.
module({Mod,Exp,Attr,Fs,Lc}=Code, _Opts)
when is_atom(Mod), is_list(Exp), is_list(Attr), is_integer(Lc) ->
case validate(Mod, Fs) of
- [] -> {ok,Code};
+ [] ->
+ {ok,Code};
Es0 ->
Es = [{?MODULE,E} || E <- Es0],
{error,[{atom_to_list(Mod),Es}]}
@@ -79,12 +52,6 @@ module({Mod,Exp,Attr,Fs,Lc}=Code, _Opts)
-spec format_error(term()) -> iolist().
-format_error([]) -> [];
-format_error([{{M,F,A},{I,Off,Desc}}|Es]) ->
- [io_lib:format(" ~p:~p/~p+~p:~n ~p - ~p~n",
- [M,F,A,Off,I,Desc])|format_error(Es)];
-format_error([Error|Es]) ->
- [format_error(Error)|format_error(Es)];
format_error({{_M,F,A},{I,Off,limit}}) ->
io_lib:format(
"function ~p/~p+~p:~n"
@@ -103,8 +70,6 @@ format_error({{_M,F,A},{I,Off,Desc}}) ->
" Internal consistency check failed - please report this bug.~n"
" Instruction: ~p~n"
" Error: ~p:~n", [F,A,Off,I,Desc]);
-format_error({Module,Error}) ->
- [Module:format_error(Error)];
format_error(Error) ->
io_lib:format("~p~n", [Error]).
@@ -112,36 +77,6 @@ format_error(Error) ->
%%% Local functions follow.
%%%
-s_file(Name) ->
- {ok,Is} = file:consult(Name),
- {module,Module} = lists:keyfind(module, 1, Is),
- Fs = find_functions(Is),
- validate(Module, Fs).
-
-find_functions(Fs) ->
- find_functions_1(Fs, none, [], []).
-
-find_functions_1([{function,Name,Arity,Entry}|Is], Func, FuncAcc, Acc0) ->
- Acc = add_func(Func, FuncAcc, Acc0),
- find_functions_1(Is, {Name,Arity,Entry}, [], Acc);
-find_functions_1([I|Is], Func, FuncAcc, Acc) ->
- find_functions_1(Is, Func, [I|FuncAcc], Acc);
-find_functions_1([], Func, FuncAcc, Acc) ->
- reverse(add_func(Func, FuncAcc, Acc)).
-
-add_func(none, _, Acc) -> Acc;
-add_func({Name,Arity,Entry}, Is, Acc) ->
- [{function,Name,Arity,Entry,reverse(Is)}|Acc].
-
-beam_file(Name) ->
- try beam_disasm:file(Name) of
- {error,beam_lib,Reason} -> [{beam_lib,Reason}];
- #beam_file{module=Module, code=Code0} ->
- Code = normalize_disassembled_code(Code0),
- validate(Module, Code)
- catch _:_ -> [disassembly_failed]
- end.
-
%%%
%%% The validator follows.
%%%
@@ -196,23 +131,16 @@ validate_0(Module, [{function,Name,Ar,Entry,Code}|Fs], Ft) ->
try validate_1(Code, Name, Ar, Entry, Ft) of
_ -> validate_0(Module, Fs, Ft)
catch
- Error ->
+ throw:Error ->
+ %% Controlled error.
[Error|validate_0(Module, Fs, Ft)];
- error:Error ->
- [validate_error(Error, Module, Name, Ar)|validate_0(Module, Fs, Ft)]
+ Class:Error ->
+ %% Crash.
+ Stack = erlang:get_stacktrace(),
+ io:fwrite("Function: ~w/~w\n", [Name,Ar]),
+ erlang:raise(Class, Error, Stack)
end.
--ifdef(DEBUG).
-validate_error(Error, Module, Name, Ar) ->
- exit(validate_error_1(Error, Module, Name, Ar)).
--else.
-validate_error(Error, Module, Name, Ar) ->
- validate_error_1(Error, Module, Name, Ar).
--endif.
-validate_error_1(Error, Module, Name, Ar) ->
- {{Module,Name,Ar},
- {internal_error,'_',{Error,erlang:get_stacktrace()}}}.
-
-type index() :: non_neg_integer().
-type reg_tab() :: gb_trees:tree(index(), 'none' | {'value', _}).
@@ -225,7 +153,6 @@ validate_error_1(Error, Module, Name, Ar) ->
hf=0, %Available heap size for floats.
fls=undefined, %Floating point state.
ct=[], %List of hot catch/try labels
- bsm=undefined, %Bit syntax matching state.
bits=undefined, %Number of bits in bit syntax binary.
setelem=false %Previous instruction was setelement/3.
}).
@@ -308,7 +235,7 @@ labels_1([{label,L}|Is], R) ->
labels_1([{line,_}|Is], R) ->
labels_1(Is, R);
labels_1(Is, R) ->
- {lists:reverse(R),Is}.
+ {reverse(R),Is}.
init_state(Arity) ->
Xs = init_regs(Arity, term),
@@ -403,10 +330,6 @@ valfun_1({init,{y,_}=Reg}, Vst) ->
set_type_y(initialized, Reg, Vst);
valfun_1({test_heap,Heap,Live}, Vst) ->
test_heap(Heap, Live, Vst);
-valfun_1({bif,_Op,nofail,Src,Dst}, Vst) ->
- %% The 'nofail' atom only occurs in disassembled code.
- validate_src(Src, Vst),
- set_type_reg(term, Dst, Vst);
valfun_1({bif,Op,{f,_},Src,Dst}=I, Vst) ->
case is_bif_safe(Op, length(Src)) of
false ->
@@ -432,9 +355,6 @@ valfun_1({put_tuple,Sz,Dst}, Vst0) when is_integer(Sz) ->
valfun_1({put,Src}, Vst) ->
assert_term(Src, Vst),
eat_heap(1, Vst);
-valfun_1({put_string,Sz,_,Dst}, Vst0) when is_integer(Sz) ->
- Vst = eat_heap(2*Sz, Vst0),
- set_type_reg(cons, Dst, Vst);
%% Instructions for optimization of selective receives.
valfun_1({recv_mark,{f,Fail}}, Vst) when is_integer(Fail) ->
Vst;
@@ -602,8 +522,6 @@ valfun_4({call_ext_last,Live,Func,StkSize},
tail_call(Func, Live, Vst);
valfun_4({call_ext_last,_,_,_}, #vst{current=#st{numy=NumY}}) ->
error({allocated,NumY});
-valfun_4({make_fun,_,_,Live}, Vst) ->
- call('fun', Live, Vst);
valfun_4({make_fun2,_,_,_,Live}, Vst) ->
call(make_fun, Live, Vst);
%% Other BIFs
@@ -620,8 +538,6 @@ valfun_4({bif,element,{f,Fail},[Pos,Tuple],Dst}, Vst0) ->
TupleType = upgrade_tuple_type({tuple,[get_tuple_size(PosType)]}, TupleType0),
Vst = set_type(TupleType, Tuple, Vst1),
set_type_reg(term, Dst, Vst);
-valfun_4({raise,{f,_}=Fail,Src,Dst}, Vst) ->
- valfun_4({bif,raise,Fail,Src,Dst}, Vst);
valfun_4({bif,Op,{f,Fail},Src,Dst}, Vst0) ->
validate_src(Src, Vst0),
Vst = branch_state(Fail, Vst0),
@@ -738,32 +654,6 @@ valfun_4({bs_save2,Ctx,SavePoint}, Vst) ->
valfun_4({bs_restore2,Ctx,SavePoint}, Vst) ->
bsm_restore(Ctx, SavePoint, Vst);
-%% Bit syntax instructions.
-valfun_4({bs_start_match,{f,_Fail}=F,Src}, Vst) ->
- valfun_4({test,bs_start_match,F,[Src]}, Vst);
-valfun_4({test,bs_start_match,{f,Fail},[Src]}, Vst) ->
- assert_term(Src, Vst),
- bs_start_match(branch_state(Fail, Vst));
-
-valfun_4({bs_save,SavePoint}, Vst) ->
- bs_assert_state(Vst),
- bs_save(SavePoint, Vst);
-valfun_4({bs_restore,SavePoint}, Vst) ->
- bs_assert_state(Vst),
- bs_assert_savepoint(SavePoint, Vst),
- Vst;
-valfun_4({test,bs_skip_bits,{f,Fail},[Src,_,_]}, Vst) ->
- bs_assert_state(Vst),
- assert_term(Src, Vst),
- branch_state(Fail, Vst);
-valfun_4({test,bs_test_tail,{f,Fail},_}, Vst) ->
- bs_assert_state(Vst),
- branch_state(Fail, Vst);
-valfun_4({test,_,{f,Fail},[_,_,_,Dst]}, Vst0) ->
- bs_assert_state(Vst0),
- Vst = branch_state(Fail, Vst0),
- set_type_reg({integer,[]}, Dst, Vst);
-
%% Other test instructions.
valfun_4({test,is_float,{f,Lbl},[Float]}, Vst) ->
assert_term(Float, Vst),
@@ -795,9 +685,6 @@ valfun_4({bs_utf8_size,{f,Fail},A,Dst}, Vst) ->
valfun_4({bs_utf16_size,{f,Fail},A,Dst}, Vst) ->
assert_term(A, Vst),
set_type_reg({integer,[]}, Dst, branch_state(Fail, Vst));
-valfun_4({bs_bits_to_bytes,{f,Fail},Src,Dst}, Vst) ->
- assert_term(Src, Vst),
- set_type_reg({integer,[]}, Dst, branch_state(Fail, Vst));
valfun_4({bs_init2,{f,Fail},Sz,Heap,Live,_,Dst}, Vst0) ->
verify_live(Live, Vst0),
if
@@ -868,16 +755,6 @@ valfun_4({bs_put_utf32,{f,Fail},_,Src}=I, Vst0) ->
assert_term(Src, Vst0),
Vst = bs_align_check(I, Vst0),
branch_state(Fail, Vst);
-%% Old bit syntax construction (before R10B).
-valfun_4({bs_init,_,_}, Vst) ->
- bs_zero_bits(Vst);
-valfun_4({bs_need_buf,_}, Vst) -> Vst;
-valfun_4({bs_final,{f,Fail},Dst}, Vst0) ->
- Vst = branch_state(Fail, Vst0),
- set_type_reg(binary, Dst, Vst);
-valfun_4({bs_final2,Src,Dst}, Vst0) ->
- assert_term(Src, Vst0),
- set_type_reg(binary, Dst, Vst0);
%% Map instructions.
valfun_4({put_map_assoc,{f,Fail},Src,Dst,Live,{list,List}}, Vst) ->
verify_put_map(Fail, Src, Dst, Live, List, Vst);
@@ -891,10 +768,14 @@ valfun_4(_, _) ->
verify_get_map(Fail, Src, List, Vst0) ->
assert_term(Src, Vst0),
Vst1 = branch_state(Fail, Vst0),
- Lits = mmap(fun(L,_R) -> [L] end, List),
- assert_strict_literal_termorder(Lits),
+ Keys = extract_map_keys(List),
+ assert_strict_literal_termorder(Keys),
verify_get_map_pair(List,Vst0,Vst1).
+extract_map_keys([Key,_Val|T]) ->
+ [Key|extract_map_keys(T)];
+extract_map_keys([]) -> [].
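A minimal sketch of the replacement for the old mmap-based key extraction, with a hypothetical key/register list as it might appear in a get_map_elements instruction; it simply keeps every other element, starting with the first:

    extract_map_keys([{atom,id},{x,1},{atom,name},{x,2}])
        -> [{atom,id},{atom,name}]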
+
verify_get_map_pair([],_,Vst) -> Vst;
verify_get_map_pair([Src,Dst|Vs],Vst0,Vsti) ->
assert_term(Src, Vst0),
@@ -936,9 +817,6 @@ validate_bs_skip_utf(Fail, Ctx, Live, Vst0) ->
%%
val_dsetel({move,_,_}, Vst) ->
Vst;
-val_dsetel({put_string,0,{string,""},_}, Vst) ->
- %% An empty string is OK since it doesn't build anything.
- Vst;
val_dsetel({call_ext,3,{extfunc,erlang,setelement,3}}, #vst{current=St}=Vst) ->
Vst#vst{current=St#st{setelem=true}};
val_dsetel({set_tuple_element,_,_,_}, #vst{current=#st{setelem=false}}) ->
@@ -972,7 +850,7 @@ call(Name, Live, #vst{current=St}=Vst) ->
Type when Type =/= exception ->
%% Type is never 'exception' because it has been handled earlier.
Xs = gb_trees_from_list([{0,Type}]),
- Vst#vst{current=St#st{x=Xs,f=init_fregs(),bsm=undefined}}
+ Vst#vst{current=St#st{x=Xs,f=init_fregs()}}
end.
%% Tail call.
@@ -1030,7 +908,7 @@ allocate(_, _, _, _, #vst{current=#st{numy=Numy}}) ->
error({existing_stack_frame,{size,Numy}}).
deallocate(#vst{current=St}=Vst) ->
- Vst#vst{current=St#st{y=init_regs(0, initialized),numy=none,bsm=undefined}}.
+ Vst#vst{current=St#st{y=init_regs(0, initialized),numy=none}}.
test_heap(Heap, Live, Vst0) ->
verify_live(Live, Vst0),
@@ -1038,7 +916,7 @@ test_heap(Heap, Live, Vst0) ->
heap_alloc(Heap, Vst).
heap_alloc(Heap, #vst{current=St0}=Vst) ->
- St1 = kill_heap_allocation(St0#st{bsm=undefined}),
+ St1 = kill_heap_allocation(St0),
St = heap_alloc_1(Heap, St1),
Vst#vst{current=St}.
@@ -1122,74 +1000,30 @@ assert_freg_set(Fr, _) -> error({bad_source,Fr}).
%%% Maps
-%% ensure that a list of literals has a strict
-%% ascending term order (also meaning unique literals).
-%% Single item lists may have registers.
-assert_strict_literal_termorder([_]) -> ok;
-assert_strict_literal_termorder(Ls) ->
- Vs = lists:map(fun (L) -> get_literal(L) end, Ls),
+%% A single-item list may contain either a literal or a register.
+%%
+%% A list with more than one item must contain literals in
+%% ascending term order.
+%%
+%% An empty list is not allowed.
+
+assert_strict_literal_termorder([]) ->
+ %% There is no reason to use the get_map_elements and
+ %% has_map_fields instructions with empty lists.
+ error(empty_field_list);
+assert_strict_literal_termorder([_]) ->
+ ok;
+assert_strict_literal_termorder([_,_|_]=Ls) ->
+ Vs = [get_literal(L) || L <- Ls],
case check_strict_value_termorder(Vs) of
true -> ok;
- false -> error({not_strict_order, Ls})
- end.
-
-%% usage:
-%% mmap(fun(A,B) -> [{A,B}] end, [1,2,3,4]),
-%% [{1,2},{3,4}]
-
-mmap(F,List) ->
- {arity,Ar} = erlang:fun_info(F,arity),
- mmap(F,Ar,List).
-mmap(_F,_,[]) -> [];
-mmap(F,Ar,List) ->
- {Hd,Tl} = lists:split(Ar,List),
- apply(F,Hd) ++ mmap(F,Ar,Tl).
-
-check_strict_value_termorder([]) -> true;
-check_strict_value_termorder([_]) -> true;
-check_strict_value_termorder([V1,V2]) ->
- erts_internal:cmp_term(V1,V2) < 0;
-check_strict_value_termorder([V1,V2|Vs]) ->
- case erts_internal:cmp_term(V1,V2) < 0 of
- true -> check_strict_value_termorder([V2|Vs]);
- false -> false
- end.
-
-%%%
-%%% Binary matching.
-%%%
-%%% Possible values for the bsm field (=bit syntax matching state).
-%%%
-%%% undefined - Undefined (initial state). No matching instructions allowed.
-%%%
-%%% (gb set) - The gb set contains the defined save points.
-%%%
-%%% The bsm field is reset to 'undefined' by instructions that may cause a
-%%% a garbage collection (might move the binary) and/or context switch
-%%% (may invalidate the save points).
-
-bs_start_match(#vst{current=#st{bsm=undefined}=St}=Vst) ->
- Vst#vst{current=St#st{bsm=gb_sets:empty()}};
-bs_start_match(Vst) ->
- %% Must retain save points here - it is possible to restore back
- %% to a previous binary.
- Vst.
-
-bs_save(Reg, #vst{current=#st{bsm=Saved}=St}=Vst)
- when is_integer(Reg), Reg < ?MAXREG ->
- Vst#vst{current=St#st{bsm=gb_sets:add(Reg, Saved)}};
-bs_save(_, _) -> error(limit).
-
-bs_assert_savepoint(Reg, #vst{current=#st{bsm=Saved}}) ->
- case gb_sets:is_member(Reg, Saved) of
- false -> error({no_save_point,Reg});
- true -> ok
+ false -> error(not_strict_order)
end.
-bs_assert_state(#vst{current=#st{bsm=undefined}}) ->
- error(no_bs_match_state);
-bs_assert_state(_) -> ok.
-
+check_strict_value_termorder([V1|[V2|_]=Vs]) ->
+ erts_internal:cmp_term(V1, V2) < 0 andalso
+ check_strict_value_termorder(Vs);
+check_strict_value_termorder([_]) -> true.
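A few sample evaluations of the strict term-order check, using plain terms chosen for illustration; erts_internal:cmp_term/2 applies the total term order, so both duplicates and out-of-order keys are rejected:

    check_strict_value_termorder([1, 2, a]) -> true      (numbers sort before atoms)
    check_strict_value_termorder([b, a])    -> false
    check_strict_value_termorder([a, a])    -> false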
%%%
%%% New binary matching instructions.
@@ -1525,14 +1359,13 @@ merge_states(L, St, Branched) when L =/= 0 ->
{value,OtherSt} -> merge_states_1(St, OtherSt)
end.
-merge_states_1(#st{x=Xs0,y=Ys0,numy=NumY0,h=H0,ct=Ct0,bsm=Bsm0}=St,
- #st{x=Xs1,y=Ys1,numy=NumY1,h=H1,ct=Ct1,bsm=Bsm1}) ->
+merge_states_1(#st{x=Xs0,y=Ys0,numy=NumY0,h=H0,ct=Ct0}=St,
+ #st{x=Xs1,y=Ys1,numy=NumY1,h=H1,ct=Ct1}) ->
NumY = merge_stk(NumY0, NumY1),
Xs = merge_regs(Xs0, Xs1),
Ys = merge_y_regs(Ys0, Ys1),
Ct = merge_ct(Ct0, Ct1),
- Bsm = merge_bsm(Bsm0, Bsm1),
- St#st{x=Xs,y=Ys,numy=NumY,h=min(H0, H1),ct=Ct,bsm=Bsm}.
+ St#st{x=Xs,y=Ys,numy=NumY,h=min(H0, H1),ct=Ct}.
merge_stk(S, S) -> S;
merge_stk(_, _) -> undecided.
@@ -1615,10 +1448,6 @@ merge_types(T1, T2) when T1 =/= T2 ->
%% Too different. All we know is that the type is a 'term'.
term.
-merge_bsm(undefined, _) -> undefined;
-merge_bsm(_, undefined) -> undefined;
-merge_bsm(Bsm0, Bsm1) -> gb_sets:intersection(Bsm0, Bsm1).
-
tuple_sz([Sz]) -> Sz;
tuple_sz(Sz) -> Sz.
@@ -1818,6 +1647,7 @@ return_type_math(erf, 1) -> {float,[]};
return_type_math(erfc, 1) -> {float,[]};
return_type_math(exp, 1) -> {float,[]};
return_type_math(log, 1) -> {float,[]};
+return_type_math(log2, 1) -> {float,[]};
return_type_math(log10, 1) -> {float,[]};
return_type_math(sqrt, 1) -> {float,[]};
return_type_math(atan2, 2) -> {float,[]};
@@ -1839,52 +1669,3 @@ error(Error) -> exit(Error).
-else.
error(Error) -> throw(Error).
-endif.
-
-
-%%%
-%%% Rewrite disassembled code to the same format as we used internally
-%%% to not have to worry later.
-%%%
-
-normalize_disassembled_code(Fs) ->
- Index = ndc_index(Fs, []),
- ndc(Fs, Index, []).
-
-ndc_index([{function,Name,Arity,Entry,_Code}|Fs], Acc) ->
- ndc_index(Fs, [{{Name,Arity},Entry}|Acc]);
-ndc_index([], Acc) ->
- gb_trees:from_orddict(lists:sort(Acc)).
-
-ndc([{function,Name,Arity,Entry,Code0}|Fs], D, Acc) ->
- Code = ndc_1(Code0, D, []),
- ndc(Fs, D, [{function,Name,Arity,Entry,Code}|Acc]);
-ndc([], _, Acc) -> reverse(Acc).
-
-ndc_1([{call=Op,A,{_,F,A}}|Is], D, Acc) ->
- ndc_1(Is, D, [{Op,A,{f,gb_trees:get({F,A}, D)}}|Acc]);
-ndc_1([{call_only=Op,A,{_,F,A}}|Is], D, Acc) ->
- ndc_1(Is, D, [{Op,A,{f,gb_trees:get({F,A}, D)}}|Acc]);
-ndc_1([{call_last=Op,A,{_,F,A},Sz}|Is], D, Acc) ->
- ndc_1(Is, D, [{Op,A,{f,gb_trees:get({F,A}, D)},Sz}|Acc]);
-ndc_1([{arithbif,Op,F,Src,Dst}|Is], D, Acc) ->
- ndc_1(Is, D, [{bif,Op,F,Src,Dst}|Acc]);
-ndc_1([{arithfbif,Op,F,Src,Dst}|Is], D, Acc) ->
- ndc_1(Is, D, [{bif,Op,F,Src,Dst}|Acc]);
-ndc_1([{test,bs_start_match2=Op,F,[A1,Live,A3,Dst]}|Is], D, Acc) ->
- ndc_1(Is, D, [{test,Op,F,Live,[A1,A3],Dst}|Acc]);
-ndc_1([{test,bs_get_binary2=Op,F,[A1,Live,A3,A4,A5,Dst]}|Is], D, Acc) ->
- ndc_1(Is, D, [{test,Op,F,Live,[A1,A3,A4,A5],Dst}|Acc]);
-ndc_1([{test,bs_get_float2=Op,F,[A1,Live,A3,A4,A5,Dst]}|Is], D, Acc) ->
- ndc_1(Is, D, [{test,Op,F,Live,[A1,A3,A4,A5],Dst}|Acc]);
-ndc_1([{test,bs_get_integer2=Op,F,[A1,Live,A3,A4,A5,Dst]}|Is], D, Acc) ->
- ndc_1(Is, D, [{test,Op,F,Live,[A1,A3,A4,A5],Dst}|Acc]);
-ndc_1([{test,bs_get_utf8=Op,F,[A1,Live,A3,Dst]}|Is], D, Acc) ->
- ndc_1(Is, D, [{test,Op,F,Live,[A1,A3],Dst}|Acc]);
-ndc_1([{test,bs_get_utf16=Op,F,[A1,Live,A3,Dst]}|Is], D, Acc) ->
- ndc_1(Is, D, [{test,Op,F,Live,[A1,A3],Dst}|Acc]);
-ndc_1([{test,bs_get_utf32=Op,F,[A1,Live,A3,Dst]}|Is], D, Acc) ->
- ndc_1(Is, D, [{test,Op,F,Live,[A1,A3],Dst}|Acc]);
-ndc_1([I|Is], D, Acc) ->
- ndc_1(Is, D, [I|Acc]);
-ndc_1([], _, Acc) ->
- reverse(Acc).
diff --git a/lib/compiler/src/beam_z.erl b/lib/compiler/src/beam_z.erl
index c2a6ef604e..0c7bef9183 100644
--- a/lib/compiler/src/beam_z.erl
+++ b/lib/compiler/src/beam_z.erl
@@ -79,17 +79,9 @@ undo_rename({put_map,Fail,assoc,S,D,R,L}) ->
undo_rename({put_map,Fail,exact,S,D,R,L}) ->
{put_map_exact,Fail,S,D,R,L};
undo_rename({test,has_map_fields,Fail,[Src|List]}) ->
- {test,has_map_fields,Fail,Src,{list,[to_typed_literal(V)||V<-List]}};
-undo_rename({get_map_elements,Fail,Src,{list, List}}) ->
- {get_map_elements,Fail,Src,{list,[to_typed_literal(V)||V<-List]}};
+ {test,has_map_fields,Fail,Src,{list,List}};
+undo_rename({get_map_elements,Fail,Src,{list,List}}) ->
+ {get_map_elements,Fail,Src,{list,List}};
undo_rename({select,I,Reg,Fail,List}) ->
{I,Reg,Fail,{list,List}};
undo_rename(I) -> I.
-
-%% to_typed_literal(Arg)
-%% transform Arg to specific literal i.e. float | integer | atom if applicable
-to_typed_literal({literal, V}) when is_float(V) -> {float, V};
-to_typed_literal({literal, V}) when is_atom(V) -> {atom, V};
-to_typed_literal({literal, V}) when is_integer(V) -> {integer, V};
-to_typed_literal({literal, []}) -> nil;
-to_typed_literal(V) -> V.
diff --git a/lib/compiler/src/cerl.erl b/lib/compiler/src/cerl.erl
index 7a2c3d70de..3d4b9ee0c6 100644
--- a/lib/compiler/src/cerl.erl
+++ b/lib/compiler/src/cerl.erl
@@ -123,11 +123,14 @@
bitstr_flags/1,
%% keep map exports here for now
+ c_map_pattern/1,
+ is_c_map/1,
map_es/1,
map_arg/1,
update_c_map/3,
c_map/1, is_c_map_empty/1,
ann_c_map/2, ann_c_map/3,
+ ann_c_map_pattern/2,
map_pair_op/1,map_pair_key/1,map_pair_val/1,
update_c_map_pair/4,
c_map_pair/2,
@@ -431,6 +434,8 @@ is_literal_term([H | T]) ->
is_literal_term(T) when is_tuple(T) ->
is_literal_term_list(tuple_to_list(T));
is_literal_term(B) when is_bitstring(B) -> true;
+is_literal_term(M) when is_map(M) ->
+ is_literal_term_list(maps:to_list(M));
is_literal_term(_) ->
false.
@@ -1577,6 +1582,20 @@ ann_make_list(_, [], Node) ->
%% ---------------------------------------------------------------------
%% maps
+%% @spec is_c_map(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% map constructor, otherwise <code>false</code>.
+
+-spec is_c_map(cerl()) -> boolean().
+
+is_c_map(#c_map{}) ->
+ true;
+is_c_map(#c_literal{val = V}) when is_map(V) ->
+ true;
+is_c_map(_) ->
+ false.
+
-spec map_es(c_map()) -> [c_map_pair()].
map_es(#c_map{es = Es}) ->
@@ -1590,7 +1609,17 @@ map_arg(#c_map{arg=M}) ->
-spec c_map([c_map_pair()]) -> c_map().
c_map(Pairs) ->
- #c_map{es=Pairs}.
+ ann_c_map([], Pairs).
+
+-spec c_map_pattern([c_map_pair()]) -> c_map().
+
+c_map_pattern(Pairs) ->
+ #c_map{es=Pairs, is_pat=true}.
+
+-spec ann_c_map_pattern([term()], [c_map_pair()]) -> c_map().
+
+ann_c_map_pattern(As, Pairs) ->
+ #c_map{anno=As, es=Pairs, is_pat=true}.
-spec is_c_map_empty(c_map() | c_literal()) -> boolean().
@@ -1598,25 +1627,13 @@ is_c_map_empty(#c_map{ es=[] }) -> true;
is_c_map_empty(#c_literal{val=M}) when is_map(M),map_size(M) =:= 0 -> true;
is_c_map_empty(_) -> false.
--spec ann_c_map([term()], [cerl()]) -> c_map() | c_literal().
+-spec ann_c_map([term()], [c_map_pair()]) -> c_map() | c_literal().
-ann_c_map(As,Es) ->
+ann_c_map(As, Es) ->
ann_c_map(As, #c_literal{val=#{}}, Es).
-spec ann_c_map([term()], c_map() | c_literal(), [c_map_pair()]) -> c_map() | c_literal().
-ann_c_map(As,#c_literal{val=Mval}=M,Es) when is_map(Mval), map_size(Mval) =:= 0 ->
- Pairs = [[Ck,Cv]||#c_map_pair{key=Ck,val=Cv}<-Es],
- IsLit = lists:foldl(fun(Pair,Res) ->
- Res andalso is_lit_list(Pair)
- end, true, Pairs),
- Fun = fun(Pair) -> [K,V] = lit_list_vals(Pair), {K,V} end,
- case IsLit of
- false ->
- #c_map{arg=M, es=Es, anno=As };
- true ->
- #c_literal{anno=As, val=maps:from_list(lists:map(Fun, Pairs))}
- end;
ann_c_map(As,#c_literal{val=M},Es) when is_map(M) ->
fold_map_pairs(As,Es,M);
ann_c_map(As,M,Es) ->
@@ -1644,14 +1661,14 @@ fold_map_pairs(As,[#c_map_pair{op=#c_literal{val=exact},key=Ck,val=Cv}=E|Es],M)
end;
false ->
#c_map{arg=#c_literal{val=M,anno=As}, es=[E|Es], anno=As }
- end;
-fold_map_pairs(As,Es,M) ->
- #c_map{arg=#c_literal{val=M,anno=As}, es=Es, anno=As }.
+ end.
-%-spec update_c_map(c_map() | c_literal(), [c_map_pair()]) -> c_map() | c_literal().
+-spec update_c_map(c_map(), cerl(), [cerl()]) -> c_map() | c_literal().
-update_c_map(Old,M,Es) ->
- #c_map{arg=M, es = Es, anno = get_ann(Old)}.
+update_c_map(#c_map{is_pat=true}=Old, M, Es) ->
+ Old#c_map{arg=M, es=Es};
+update_c_map(#c_map{is_pat=false}=Old, M, Es) ->
+ ann_c_map(get_ann(Old), M, Es).
map_pair_key(#c_map_pair{key=K}) -> K.
map_pair_val(#c_map_pair{val=V}) -> V.
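%% A brief usage sketch (not from the patch) of the map API added above; the
%% function name is made up for illustration and only pre-existing cerl
%% constructors (cerl:abstract/1, cerl:c_nil/0) are used besides the new exports:

cerl_map_api_sketch() ->
    Empty = cerl:c_map_pattern([]),
    true = cerl:is_c_map(Empty),
    true = cerl:is_c_map_empty(Empty),
    true = cerl:is_c_map(cerl:abstract(#{answer => 42})),  %a literal map counts as a map
    false = cerl:is_c_map(cerl:c_nil()),
    ok.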
diff --git a/lib/compiler/src/cerl_clauses.erl b/lib/compiler/src/cerl_clauses.erl
index 87bd47c08b..ef74c5b76f 100644
--- a/lib/compiler/src/cerl_clauses.erl
+++ b/lib/compiler/src/cerl_clauses.erl
@@ -354,29 +354,29 @@ match(P, E, Bs) ->
{false, Bs}
end
end;
- map ->
- %% The most we can do is to say "definitely no match" if a
- %% map pattern is matched against non-map data.
- case E of
- any ->
- {false, Bs};
- _ ->
- case type(E) of
- literal ->
- case is_map(concrete(E)) of
- false ->
- none;
- true ->
- {false, Bs}
- end;
- cons ->
- none;
- tuple ->
- none;
- _ ->
- {false, Bs}
- end
- end;
+ map ->
+ %% The most we can do is to say "definitely no match" if a
+ %% map pattern is matched against non-map data.
+ case E of
+ any ->
+ {false, Bs};
+ _ ->
+ case type(E) of
+ literal ->
+ case is_map(concrete(E)) of
+ false ->
+ none;
+ true ->
+ {false, Bs}
+ end;
+ cons ->
+ none;
+ tuple ->
+ none;
+ _ ->
+ {false, Bs}
+ end
+ end;
_ ->
match_1(P, E, Bs)
end.
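%% A sketch (not from the patch) of the conservative rule implemented by the
%% re-indented 'map' clause above; the function name is made up for illustration:

map_match_sketch() ->
    MapPat = cerl:c_map_pattern([]),
    %% A literal that is not a map can never match a map pattern.
    none = cerl_clauses:match(MapPat, cerl:abstract(42)),
    %% Against a map literal or an unknown value we can only answer "maybe".
    {false,_} = cerl_clauses:match(MapPat, cerl:abstract(#{})),
    {false,_} = cerl_clauses:match(MapPat, any),
    ok.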
diff --git a/lib/compiler/src/compiler.app.src b/lib/compiler/src/compiler.app.src
index 8f68915f8e..fbaa7a96fe 100644
--- a/lib/compiler/src/compiler.app.src
+++ b/lib/compiler/src/compiler.app.src
@@ -56,6 +56,7 @@
rec_env,
sys_core_dsetel,
sys_core_fold,
+ sys_core_fold_lists,
sys_core_inline,
sys_pre_attributes,
sys_pre_expand,
diff --git a/lib/compiler/src/core_lib.erl b/lib/compiler/src/core_lib.erl
index 2792fd8fa5..66319dbd36 100644
--- a/lib/compiler/src/core_lib.erl
+++ b/lib/compiler/src/core_lib.erl
@@ -20,6 +20,12 @@
-module(core_lib).
+-deprecated({get_anno,1,next_major_release}).
+-deprecated({set_anno,2,next_major_release}).
+-deprecated({is_literal,1,next_major_release}).
+-deprecated({is_literal_list,1,next_major_release}).
+-deprecated({literal_value,1,next_major_release}).
+
-export([get_anno/1,set_anno/2]).
-export([is_literal/1,is_literal_list/1]).
-export([literal_value/1]).
@@ -33,59 +39,27 @@
%%
-spec get_anno(cerl:cerl()) -> term().
-get_anno(C) -> element(2, C).
+get_anno(C) -> cerl:get_ann(C).
-spec set_anno(cerl:cerl(), term()) -> cerl:cerl().
-set_anno(C, A) -> setelement(2, C, A).
+set_anno(C, A) -> cerl:set_ann(C, A).
-spec is_literal(cerl:cerl()) -> boolean().
-is_literal(#c_literal{}) -> true;
-is_literal(#c_cons{hd=H,tl=T}) ->
- is_literal(H) andalso is_literal(T);
-is_literal(#c_tuple{es=Es}) -> is_literal_list(Es);
-is_literal(#c_binary{segments=Es}) -> is_lit_bin(Es);
-is_literal(_) -> false.
+is_literal(Cerl) ->
+ cerl:is_literal(cerl:fold_literal(Cerl)).
-spec is_literal_list([cerl:cerl()]) -> boolean().
is_literal_list(Es) -> lists:all(fun is_literal/1, Es).
-is_lit_bin(Es) ->
- lists:all(fun (#c_bitstr{val=E,size=S}) ->
- is_literal(E) andalso is_literal(S)
- end, Es).
-
%% Return the value of LitExpr.
-spec literal_value(cerl:c_literal() | cerl:c_binary() |
cerl:c_map() | cerl:c_cons() | cerl:c_tuple()) -> term().
-literal_value(#c_literal{val=V}) -> V;
-literal_value(#c_binary{segments=Es}) ->
- list_to_binary([literal_value_bin(Bit) || Bit <- Es]);
-literal_value(#c_cons{hd=H,tl=T}) ->
- [literal_value(H)|literal_value(T)];
-literal_value(#c_tuple{es=Es}) ->
- list_to_tuple(literal_value_list(Es));
-literal_value(#c_map{arg=Cm,es=Cmps}) ->
- M = literal_value(Cm),
- lists:foldl(fun(#c_map_pair{ key=Ck, val=Cv },Mi) ->
- K = literal_value(Ck),
- V = literal_value(Cv),
- maps:put(K,V,Mi)
- end, M, Cmps).
-
-literal_value_list(Vals) -> [literal_value(V) || V <- Vals].
-
-literal_value_bin(#c_bitstr{val=Val,size=Sz,unit=U,type=T,flags=Fs}) ->
- %% We will only handle literals constructed by make_literal/1.
- %% Could be made more general in the future if the need arises.
- 8 = literal_value(Sz),
- 1 = literal_value(U),
- integer = literal_value(T),
- [unsigned,big] = literal_value(Fs),
- literal_value(Val).
+literal_value(Cerl) ->
+ cerl:concrete(cerl:fold_literal(Cerl)).
%% Make a suitable values structure, expr or values, depending on Expr.
-spec make_values([cerl:cerl()] | cerl:cerl()) -> cerl:cerl().
@@ -212,6 +186,8 @@ vu_pattern(V, #c_tuple{es=Es}, St) ->
vu_pattern_list(V, Es, St);
vu_pattern(V, #c_binary{segments=Ss}, St) ->
vu_pat_seg_list(V, Ss, St);
+vu_pattern(V, #c_map{es=Es}, St) ->
+ vu_map_pairs(V, Es, St);
vu_pattern(V, #c_alias{var=Var,pat=P}, St0) ->
case vu_pattern(V, Var, St0) of
{true,_}=St1 -> St1;
@@ -234,6 +210,18 @@ vu_pat_seg_list(V, Ss, St) ->
end
end, St, Ss).
+vu_map_pairs(V, [#c_map_pair{key=Key,val=Pat}|T], St0) ->
+ case vu_expr(V, Key) of
+ true ->
+ {true,false};
+ false ->
+ case vu_pattern(V, Pat, St0) of
+ {true,_}=St -> St;
+ St -> vu_map_pairs(V, T, St)
+ end
+ end;
+vu_map_pairs(_, [], St) -> St.
+
-spec vu_var_list(cerl:var_name(), [cerl:c_var()]) -> boolean().
vu_var_list(V, Vs) ->
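%% A minimal sketch (not from the patch) of the behaviour preserved by the
%% deprecated wrappers above, which now delegate to cerl; the function name is
%% made up for illustration:

core_lib_compat_sketch() ->
    C = cerl:c_cons(cerl:c_int(1), cerl:c_nil()),
    true = core_lib:is_literal(C),     %same as cerl:is_literal(cerl:fold_literal(C))
    [1] = core_lib:literal_value(C),   %same as cerl:concrete(cerl:fold_literal(C))
    ok.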
diff --git a/lib/compiler/src/core_lint.erl b/lib/compiler/src/core_lint.erl
index c0e2bdaba0..f62b2bb0ba 100644
--- a/lib/compiler/src/core_lint.erl
+++ b/lib/compiler/src/core_lint.erl
@@ -173,7 +173,7 @@ check_exports(Es, St) ->
end.
check_attrs(As, St) ->
- case all(fun ({#c_literal{},V}) -> core_lib:is_literal(V);
+ case all(fun ({#c_literal{},#c_literal{}}) -> true;
(_) -> false
end, As) of
true -> St;
diff --git a/lib/compiler/src/core_parse.hrl b/lib/compiler/src/core_parse.hrl
index 4a00535360..7fd4a82e4e 100644
--- a/lib/compiler/src/core_parse.hrl
+++ b/lib/compiler/src/core_parse.hrl
@@ -72,7 +72,8 @@
-record(c_map, {anno=[],
arg=#c_literal{val=#{}} :: cerl:c_var() | cerl:c_literal(),
- es :: [cerl:c_map_pair()]}).
+ es :: [cerl:c_map_pair()],
+ is_pat=false :: boolean()}).
-record(c_map_pair, {anno=[],
op :: #c_literal{val::'assoc'} | #c_literal{val::'exact'},
diff --git a/lib/compiler/src/core_parse.yrl b/lib/compiler/src/core_parse.yrl
index a66ad4235f..eeb9f5dba7 100644
--- a/lib/compiler/src/core_parse.yrl
+++ b/lib/compiler/src/core_parse.yrl
@@ -58,7 +58,8 @@ Terminals
%% Separators
-'(' ')' '{' '}' '[' ']' '|' ',' '->' '=' '/' '<' '>' ':' '-|' '#' '~' '::'
+'(' ')' '{' '}' '[' ']' '|' ',' '->' '=' '/' '<' '>' ':' '-|' '#'
+'~' '=>' ':='
%% Keywords (atoms are assumed to always be single-quoted).
@@ -123,7 +124,7 @@ function_definition ->
{'$1','$3'}.
anno_fun -> '(' fun_expr '-|' annotation ')' :
- core_lib:set_anno('$2', '$4').
+ cerl:set_ann('$2', '$4').
anno_fun -> fun_expr : '$1'.
%% Constant terms for annotations and attributes.
@@ -162,7 +163,7 @@ tail_constant -> ',' constant tail_constant : ['$2'|'$3'].
%% ( ( V -| <anno> ) = ( {a} -| <anno> ) -| <anno> )
anno_pattern -> '(' other_pattern '-|' annotation ')' :
- core_lib:set_anno('$2', '$4').
+ cerl:set_ann('$2', '$4').
anno_pattern -> other_pattern : '$1'.
anno_pattern -> anno_variable : '$1'.
@@ -182,23 +183,24 @@ atomic_pattern -> atomic_literal : '$1'.
tuple_pattern -> '{' '}' : c_tuple([]).
tuple_pattern -> '{' anno_patterns '}' : c_tuple('$2').
-map_pattern -> '~' '{' '}' '~' : #c_map{es=[]}.
+map_pattern -> '~' '{' '}' '~' : c_map_pattern([]).
map_pattern -> '~' '{' map_pair_patterns '}' '~' :
- #c_map{es=lists:sort('$3')}.
+ c_map_pattern(lists:sort('$3')).
map_pair_patterns -> map_pair_pattern : ['$1'].
map_pair_patterns -> map_pair_pattern ',' map_pair_patterns : ['$1' | '$3'].
-map_pair_pattern -> '~' '<' anno_pattern ',' anno_pattern '>' :
- #c_map_pair{op=#c_literal{val=exact},key='$3',val='$5'}.
+map_pair_pattern -> anno_expression ':=' anno_pattern :
+ #c_map_pair{op=#c_literal{val=exact},
+ key='$1',val='$3'}.
cons_pattern -> '[' anno_pattern tail_pattern :
- #c_cons{hd='$2',tl='$3'}.
+ c_cons('$2', '$3').
tail_pattern -> ']' : #c_literal{val=[]}.
tail_pattern -> '|' anno_pattern ']' : '$2'.
tail_pattern -> ',' anno_pattern tail_pattern :
- #c_cons{hd='$2',tl='$3'}.
+ c_cons('$2', '$3').
binary_pattern -> '#' '{' '}' '#' : #c_binary{segments=[]}.
binary_pattern -> '#' '{' segment_patterns '}' '#' : #c_binary{segments='$3'}.
@@ -206,7 +208,7 @@ binary_pattern -> '#' '{' segment_patterns '}' '#' : #c_binary{segments='$3'}.
segment_patterns -> segment_pattern ',' segment_patterns : ['$1' | '$3'].
segment_patterns -> segment_pattern : ['$1'].
-segment_pattern -> '#' '<' anno_pattern '>' '(' anno_patterns ')':
+segment_pattern -> '#' '<' anno_pattern '>' '(' anno_expressions ')':
case '$6' of
[S,U,T,Fs] ->
#c_bitstr{val='$3',size=S,unit=U,type=T,flags=Fs};
@@ -222,7 +224,7 @@ anno_variables -> anno_variable : ['$1'].
anno_variable -> variable : '$1'.
anno_variable -> '(' variable '-|' annotation ')' :
- core_lib:set_anno('$2', '$4').
+ cerl:set_ann('$2', '$4').
%% Expressions
%% Must split expressions into two levels as nested value expressions
@@ -230,7 +232,7 @@ anno_variable -> '(' variable '-|' annotation ')' :
anno_expression -> expression : '$1'.
anno_expression -> '(' expression '-|' annotation ')' :
- core_lib:set_anno('$2', '$4').
+ cerl:set_ann('$2', '$4').
anno_expressions -> anno_expression ',' anno_expressions : ['$1' | '$3'].
anno_expressions -> anno_expression : ['$1'].
@@ -279,15 +281,15 @@ cons_literal -> '[' literal tail_literal : c_cons('$2', '$3').
tail_literal -> ']' : #c_literal{val=[]}.
tail_literal -> '|' literal ']' : '$2'.
-tail_literal -> ',' literal tail_literal : #c_cons{hd='$2',tl='$3'}.
+tail_literal -> ',' literal tail_literal : c_cons('$2', '$3').
tuple -> '{' '}' : c_tuple([]).
tuple -> '{' anno_expressions '}' : c_tuple('$2').
-map_expr -> '~' '{' '}' '~' : #c_map{es=[]}.
-map_expr -> '~' '{' map_pairs '}' '~' : #c_map{es='$3'}.
-map_expr -> '~' '{' map_pairs '|' variable '}' '~' : #c_map{arg='$5',es='$3'}.
-map_expr -> '~' '{' map_pairs '|' map_expr '}' '~' : #c_map{arg='$5',es='$3'}.
+map_expr -> '~' '{' '}' '~' : c_map([]).
+map_expr -> '~' '{' map_pairs '}' '~' : c_map('$3').
+map_expr -> '~' '{' map_pairs '|' variable '}' '~' : ann_c_map([], '$5', '$3').
+map_expr -> '~' '{' map_pairs '|' map_expr '}' '~' : ann_c_map([], '$5', '$3').
map_pairs -> map_pair : ['$1'].
map_pairs -> map_pair ',' map_pairs : ['$1' | '$3'].
@@ -295,10 +297,10 @@ map_pairs -> map_pair ',' map_pairs : ['$1' | '$3'].
map_pair -> map_pair_assoc : '$1'.
map_pair -> map_pair_exact : '$1'.
-map_pair_assoc -> '::' '<' anno_expression ',' anno_expression'>' :
- #c_map_pair{op=#c_literal{val=assoc},key='$3',val='$5'}.
-map_pair_exact -> '~' '<' anno_expression ',' anno_expression'>' :
- #c_map_pair{op=#c_literal{val=exact},key='$3',val='$5'}.
+map_pair_assoc -> anno_expression '=>' anno_expression :
+ #c_map_pair{op=#c_literal{val=assoc},key='$1',val='$3'}.
+map_pair_exact -> anno_expression ':=' anno_expression :
+ #c_map_pair{op=#c_literal{val=exact},key='$1',val='$3'}.
cons -> '[' anno_expression tail : c_cons('$2', '$3').
@@ -307,7 +309,7 @@ tail -> '|' anno_expression ']' : '$2'.
tail -> ',' anno_expression tail : c_cons('$2', '$3').
binary -> '#' '{' '}' '#' : #c_literal{val = <<>>}.
-binary -> '#' '{' segments '}' '#' : #c_binary{segments='$3'}.
+binary -> '#' '{' segments '}' '#' : make_binary('$3').
segments -> segment ',' segments : ['$1' | '$3'].
segments -> segment : ['$1'].
@@ -326,7 +328,7 @@ function_name -> atom '/' integer :
anno_function_name -> function_name : '$1'.
anno_function_name -> '(' function_name '-|' annotation ')' :
- core_lib:set_anno('$2', '$4').
+ cerl:set_ann('$2', '$4').
let_vars -> anno_variable : ['$1'].
let_vars -> '<' '>' : [].
@@ -354,7 +356,7 @@ anno_clauses -> anno_clause : ['$1'].
anno_clause -> clause : '$1'.
anno_clause -> '(' clause '-|' annotation ')' :
- core_lib:set_anno('$2', '$4').
+ cerl:set_ann('$2', '$4').
clause -> clause_pattern 'when' anno_expression '->' anno_expression :
#c_clause{pats='$1',guard='$3',body='$5'}.
@@ -410,9 +412,55 @@ Erlang code.
-include("core_parse.hrl").
--import(cerl, [c_cons/2,c_tuple/1]).
+-import(cerl, [ann_c_map/3,c_cons/2,c_map/1,c_map_pattern/1,c_tuple/1]).
tok_val(T) -> element(3, T).
tok_line(T) -> element(2, T).
+%% make_binary([#c_bitstr{}]) -> #c_binary{} | #c_literal{}
+%% Create either #c_binary{} or #c_literal{} from the binary segments.
+%% In certain contexts, such as keys for maps, only literals and
+%% variables are allowed, so we must not create a #c_binary{}
+%% record in those situations.
+%%
+%% To keep this function simple, we use a crude heuristic. We will
+%% assume that Core Erlang has been produced by core_pp. If the
+%% segments *could* have been output from a literal binary by
+%% core_pp, we will create a #c_literal{}. Otherwise we will create a
+%% #c_binary{} record.
+
+make_binary(Segs) ->
+ try make_lit_bin(<<>>, Segs) of
+ Bs when is_bitstring(Bs) ->
+ #c_literal{val=Bs}
+ catch
+ throw:impossible ->
+ #c_binary{segments=Segs}
+ end.
+
+make_lit_bin(Acc, [#c_bitstr{val=I0,size=Sz0,unit=U0,type=Type0,flags=F0}|T]) ->
+ I = get_lit_val(I0),
+ Sz = get_lit_val(Sz0),
+ U = get_lit_val(U0),
+ Type = get_lit_val(Type0),
+ F = get_lit_val(F0),
+ if
+ is_integer(I), U =:= 1, Type =:= integer, F =:= [unsigned,big] ->
+ ok;
+ true ->
+ throw(impossible)
+ end,
+ if
+ Sz =< 8, T =:= [] ->
+ <<Acc/binary,I:Sz>>;
+ Sz =:= 8 ->
+ make_lit_bin(<<Acc/binary,I:8>>, T);
+ true ->
+ throw(impossible)
+ end;
+make_lit_bin(Acc, []) -> Acc.
+
+get_lit_val(#c_literal{val=Val}) -> Val;
+get_lit_val(_) -> throw(impossible).
+
%% vim: syntax=erlang
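%% Taken together with the core_scan and core_pp changes below, the grammar
%% above replaces the old '::<K,V>' / '~<K,V>' map pair forms with the
%% Erlang-like operators. A hedged sketch of the new concrete syntax (exact
%% pretty-printer spacing may differ):
%%
%%     ~{ 'answer' => 42 }~         %construction, assoc pair
%%     ~{ 'answer' := 42 | M }~     %update of the map M, exact pair
%%     ~{ 'answer' := X }~          %map pattern (only ':=' is allowed)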
diff --git a/lib/compiler/src/core_pp.erl b/lib/compiler/src/core_pp.erl
index 03801a9b6d..9cfca88e8c 100644
--- a/lib/compiler/src/core_pp.erl
+++ b/lib/compiler/src/core_pp.erl
@@ -45,7 +45,7 @@ format(Node) ->
format(Node, #ctxt{}).
maybe_anno(Node, Fun, Ctxt) ->
- As = core_lib:get_anno(Node),
+ As = cerl:get_ann(Node),
case get_line(As) of
none ->
maybe_anno(Node, Fun, Ctxt, As);
@@ -183,15 +183,9 @@ format_1(#c_map{arg=Var,es=Es}, Ctxt) ->
"}~"
];
format_1(#c_map_pair{op=#c_literal{val=assoc},key=K,val=V}, Ctxt) ->
- ["::<",
- format_hseq([K,V], ",", add_indent(Ctxt, 1), fun format/2),
- ">"
- ];
+ format_map_pair("=>", K, V, Ctxt);
format_1(#c_map_pair{op=#c_literal{val=exact},key=K,val=V}, Ctxt) ->
- ["~<",
- format_hseq([K,V], ",", add_indent(Ctxt, 1), fun format/2),
- ">"
- ];
+ format_map_pair(":=", K, V, Ctxt);
format_1(#c_cons{hd=H,tl=T}, Ctxt) ->
Txt = ["["|format(H, add_indent(Ctxt, 1))],
[Txt|format_list_tail(T, add_indent(Ctxt, width(Txt, Ctxt)))];
@@ -201,7 +195,7 @@ format_1(#c_alias{var=V,pat=P}, Ctxt) ->
Txt = [format(V, Ctxt)|" = "],
[Txt|format(P, add_indent(Ctxt, width(Txt, Ctxt)))];
format_1(#c_let{vars=Vs0,arg=A,body=B}, Ctxt) ->
- Vs = [core_lib:set_anno(V, []) || V <- Vs0],
+ Vs = [cerl:set_ann(V, []) || V <- Vs0],
case is_simple_term(A) of
false ->
Ctxt1 = add_indent(Ctxt, Ctxt#ctxt.body_indent),
@@ -219,7 +213,7 @@ format_1(#c_let{vars=Vs0,arg=A,body=B}, Ctxt) ->
["let ",
format_values(Vs, add_indent(Ctxt, 4)),
" = ",
- format(core_lib:set_anno(A, []), Ctxt1),
+ format(cerl:set_ann(A, []), Ctxt1),
nl_indent(Ctxt),
"in "
| format(B, add_indent(Ctxt, 4))
@@ -448,6 +442,12 @@ format_list_tail(#c_cons{anno=[],hd=H,tl=T}, Ctxt) ->
format_list_tail(Tail, Ctxt) ->
["|",format(Tail, add_indent(Ctxt, 1)),"]"].
+format_map_pair(Op, K, V, Ctxt0) ->
+ Ctxt1 = add_indent(Ctxt0, 1),
+ Txt = format(K, set_class(Ctxt1, expr)),
+ Ctxt2 = add_indent(Ctxt0, width(Txt, Ctxt1)),
+ [Txt,Op,format(V, Ctxt2)].
+
indent(Ctxt) -> indent(Ctxt#ctxt.indent, Ctxt).
indent(N, _) when N =< 0 -> "";
diff --git a/lib/compiler/src/core_scan.erl b/lib/compiler/src/core_scan.erl
index b7799b373a..8ab20b1982 100644
--- a/lib/compiler/src/core_scan.erl
+++ b/lib/compiler/src/core_scan.erl
@@ -271,8 +271,10 @@ scan1("->" ++ Cs, Toks, Pos) ->
scan1(Cs, [{'->',Pos}|Toks], Pos);
scan1("-|" ++ Cs, Toks, Pos) ->
scan1(Cs, [{'-|',Pos}|Toks], Pos);
-scan1("::" ++ Cs, Toks, Pos) ->
- scan1(Cs, [{'::',Pos}|Toks], Pos);
+scan1(":=" ++ Cs, Toks, Pos) ->
+ scan1(Cs, [{':=',Pos}|Toks], Pos);
+scan1("=>" ++ Cs, Toks, Pos) ->
+ scan1(Cs, [{'=>',Pos}|Toks], Pos);
scan1([C|Cs], Toks, Pos) -> %Punctuation character
P = list_to_atom([C]),
scan1(Cs, [{P,Pos}|Toks], Pos);
diff --git a/lib/compiler/src/erl_bifs.erl b/lib/compiler/src/erl_bifs.erl
index 6c75538194..bcc2297250 100644
--- a/lib/compiler/src/erl_bifs.erl
+++ b/lib/compiler/src/erl_bifs.erl
@@ -134,6 +134,7 @@ is_pure(math, erf, 1) -> true;
is_pure(math, erfc, 1) -> true;
is_pure(math, exp, 1) -> true;
is_pure(math, log, 1) -> true;
+is_pure(math, log2, 1) -> true;
is_pure(math, log10, 1) -> true;
is_pure(math, pow, 2) -> true;
is_pure(math, sin, 1) -> true;
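%% A small consequence sketch (not from the patch): with math:log2/1 now known
%% to be pure (and, per the return_type_math hunk earlier, to return a float),
%% sys_core_fold may constant-fold a call with a literal argument, e.g.
%% math:log2(8) folds to the literal 3.0.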
diff --git a/lib/compiler/src/sys_core_fold.erl b/lib/compiler/src/sys_core_fold.erl
index 82817a987a..2618f7adba 100644
--- a/lib/compiler/src/sys_core_fold.erl
+++ b/lib/compiler/src/sys_core_fold.erl
@@ -96,6 +96,10 @@
t=[], %Types
in_guard=false}). %In guard or not.
+-type type_info() :: cerl:cerl() | 'bool'.
+-type yes_no_maybe() :: 'yes' | 'no' | 'maybe'.
+-type sub() :: #sub{}.
+
-spec module(cerl:c_module(), [compile:option()]) ->
{'ok', cerl:c_module(), [_]}.
@@ -313,7 +317,7 @@ expr(#c_letrec{defs=Fs0,body=B0}=Letrec, Ctxt, Sub) ->
Fs1 = map(fun ({Name,Fb}) ->
{Name,expr(Fb, {letrec,Ctxt}, Sub)}
end, Fs0),
- B1 = body(B0, value, Sub),
+ B1 = body(B0, Ctxt, Sub),
Letrec#c_letrec{defs=Fs1,body=B1};
expr(#c_case{}=Case0, Ctxt, Sub) ->
%% Ideally, the compiler should only emit warnings when there is
@@ -462,10 +466,7 @@ is_safe_simple(#c_call{module=#c_literal{val=erlang},
case erl_internal:bool_op(Name, NumArgs) of
true ->
%% Boolean operators are safe if the arguments are boolean.
- all(fun(#c_var{name=V}) -> is_boolean_type(V, Sub);
- (#c_literal{val=Lit}) -> is_boolean(Lit);
- (_) -> false
- end, Args);
+ all(fun(C) -> is_boolean_type(C, Sub) =:= yes end, Args);
false ->
%% We need a rather complicated test to ensure that
%% we only allow safe calls that are allowed in a guard.
@@ -607,14 +608,6 @@ eval_binary_1([#c_bitstr{val=#c_literal{val=Val},size=#c_literal{val=Sz},
error:_ ->
throw(impossible)
end;
-eval_binary_1([#c_bitstr{val=#c_literal{},size=#c_literal{},
- unit=#c_literal{},type=#c_literal{},
- flags=#c_cons{}=Flags}=Bitstr|Ss], Acc0) ->
- case cerl:fold_literal(Flags) of
- #c_literal{} = Flags1 ->
- eval_binary_1([Bitstr#c_bitstr{flags=Flags1}|Ss], Acc0);
- _ -> throw(impossible)
- end;
eval_binary_1([], Acc) -> Acc;
eval_binary_1(_, _) -> throw(impossible).
@@ -688,23 +681,15 @@ count_bits_1(Int, Bits) -> count_bits_1(Int bsr 64, Bits+64).
%% a rewritten expression consisting of a sequence of
%% the arguments only is returned.
-useless_call(effect, #c_call{anno=Anno,
- module=#c_literal{val=Mod},
+useless_call(effect, #c_call{module=#c_literal{val=Mod},
name=#c_literal{val=Name},
args=Args}=Call) ->
A = length(Args),
case erl_bifs:is_safe(Mod, Name, A) of
false ->
case erl_bifs:is_pure(Mod, Name, A) of
- true ->
- case member(result_not_wanted, Anno) of
- false ->
- add_warning(Call, result_ignored);
- true ->
- ok
- end;
- false ->
- ok
+ true -> add_warning(Call, result_ignored);
+ false -> ok
end,
no;
true ->
@@ -730,385 +715,23 @@ make_effect_seq([], _) -> void().
call(#c_call{args=As}=Call, #c_literal{val=M}=M0, #c_literal{val=N}=N0, Sub) ->
case get(no_inline_list_funcs) of
true ->
- call_0(Call, M0, N0, As, Sub);
+ call_1(Call, M0, N0, As, Sub);
false ->
- call_1(Call, M, N, As, Sub)
+ case sys_core_fold_lists:call(Call, M, N, As) of
+ none ->
+ call_1(Call, M, N, As, Sub);
+ Core ->
+ expr(Core, Sub)
+ end
+
end;
call(#c_call{args=As}=Call, M, N, Sub) ->
- call_0(Call, M, N, As, Sub).
+ call_1(Call, M, N, As, Sub).
-call_0(Call, M, N, As0, Sub) ->
+call_1(Call, M, N, As0, Sub) ->
As1 = expr_list(As0, value, Sub),
fold_call(Call#c_call{args=As1}, M, N, As1, Sub).
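%% The large block of call_1/5 clauses removed below (the inlining of
%% lists:all/2, any/2, foreach/2, map/2, flatmap/2, filter/2, foldl/3,
%% foldr/3, mapfoldl/3 and mapfoldr/3) is assumed to have been moved into the
%% new sys_core_fold_lists module added to compiler.app.src earlier in this
%% diff. A sketch of the rewrite it performs: a call such as lists:map(F, L)
%% is expanded into a local letrec that evaluates in the same order as the
%% library function, e.g. (Core Erlang pseudo-syntax)
%%
%%     letrec 'lists^map'/1 =
%%                fun (Xs) -> case Xs of ... end
%%     in  apply 'lists^map'/1(L)
%%
%% which sys_core_fold then optimizes further via expr/2.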
-%% We inline some very common higher order list operations.
-%% We use the same evaluation order as the library function.
-
-call_1(#c_call{anno=Anno}, lists, all, [Arg1,Arg2], Sub) ->
- Loop = #c_var{name={'lists^all',1}},
- F = #c_var{name='F'},
- Xs = #c_var{name='Xs'},
- X = #c_var{name='X'},
- Err1 = #c_tuple{es=[#c_literal{val='case_clause'}, X]},
- CC1 = #c_clause{pats=[#c_literal{val=true}], guard=#c_literal{val=true},
- body=#c_apply{anno=Anno, op=Loop, args=[Xs]}},
- CC2 = #c_clause{pats=[#c_literal{val=false}], guard=#c_literal{val=true},
- body=#c_literal{val=false}},
- CC3 = #c_clause{pats=[X], guard=#c_literal{val=true},
- body=match_fail(Anno, Err1)},
- C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
- body=#c_case{arg=#c_apply{anno=Anno, op=F, args=[X]},
- clauses = [CC1, CC2, CC3]}},
- C2 = #c_clause{pats=[#c_literal{val=[]}],
- guard=#c_call{module=#c_literal{val=erlang},
- name=#c_literal{val=is_function},
- args=[F, #c_literal{val=1}]},
- body=#c_literal{val=true}},
- Err2 = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]},
- C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
- body=match_fail([{function_name,{'lists^all',1}}|Anno], Err2)},
- Fun = #c_fun{vars=[Xs],
- body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
- L = #c_var{name='L'},
- expr(#c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]},
- body=#c_letrec{defs=[{Loop,Fun}],
- body=#c_apply{anno=Anno, op=Loop, args=[L]}}},
- Sub);
-call_1(#c_call{anno=Anno}, lists, any, [Arg1,Arg2], Sub) ->
- Loop = #c_var{name={'lists^any',1}},
- F = #c_var{name='F'},
- Xs = #c_var{name='Xs'},
- X = #c_var{name='X'},
- Err1 = #c_tuple{es=[#c_literal{val='case_clause'}, X]},
- CC1 = #c_clause{pats=[#c_literal{val=true}], guard=#c_literal{val=true},
- body=#c_literal{val=true}},
- CC2 = #c_clause{pats=[#c_literal{val=false}], guard=#c_literal{val=true},
- body=#c_apply{anno=Anno, op=Loop, args=[Xs]}},
- CC3 = #c_clause{pats=[X], guard=#c_literal{val=true},
- body=match_fail(Anno, Err1)},
- C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
- body=#c_case{arg=#c_apply{anno=Anno, op=F, args=[X]},
- clauses = [CC1, CC2, CC3]}},
- C2 = #c_clause{pats=[#c_literal{val=[]}],
- guard=#c_call{module=#c_literal{val=erlang},
- name=#c_literal{val=is_function},
- args=[F, #c_literal{val=1}]},
- body=#c_literal{val=false}},
- Err2 = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]},
- C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
- body=match_fail([{function_name,{'lists^any',1}}|Anno], Err2)},
- Fun = #c_fun{vars=[Xs],
- body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
- L = #c_var{name='L'},
- expr(#c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]},
- body=#c_letrec{defs=[{Loop,Fun}],
- body=#c_apply{anno=Anno, op=Loop, args=[L]}}},
- Sub);
-call_1(#c_call{anno=Anno}, lists, foreach, [Arg1,Arg2], Sub) ->
- Loop = #c_var{name={'lists^foreach',1}},
- F = #c_var{name='F'},
- Xs = #c_var{name='Xs'},
- X = #c_var{name='X'},
- C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
- body=#c_seq{arg=#c_apply{anno=Anno, op=F, args=[X]},
- body=#c_apply{anno=Anno, op=Loop, args=[Xs]}}},
- C2 = #c_clause{pats=[#c_literal{val=[]}],
- guard=#c_call{module=#c_literal{val=erlang},
- name=#c_literal{val=is_function},
- args=[F, #c_literal{val=1}]},
- body=#c_literal{val=ok}},
- Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]},
- C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
- body=match_fail([{function_name,{'lists^foreach',1}}|Anno], Err)},
- Fun = #c_fun{vars=[Xs],
- body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
- L = #c_var{name='L'},
- expr(#c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]},
- body=#c_letrec{defs=[{Loop,Fun}],
- body=#c_apply{anno=Anno, op=Loop, args=[L]}}},
- Sub);
-call_1(#c_call{anno=Anno}, lists, map, [Arg1,Arg2], Sub) ->
- Loop = #c_var{name={'lists^map',1}},
- F = #c_var{name='F'},
- Xs = #c_var{name='Xs'},
- X = #c_var{name='X'},
- H = #c_var{name='H'},
- C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
- body=#c_let{vars=[H], arg=#c_apply{anno=Anno,
- op=F,
- args=[X]},
- body=#c_cons{hd=H,
- anno=[compiler_generated],
- tl=#c_apply{anno=Anno,
- op=Loop,
- args=[Xs]}}}},
- C2 = #c_clause{pats=[#c_literal{val=[]}],
- guard=#c_call{module=#c_literal{val=erlang},
- name=#c_literal{val=is_function},
- args=[F, #c_literal{val=1}]},
- body=#c_literal{val=[]}},
- Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]},
- C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
- body=match_fail([{function_name,{'lists^map',1}}|Anno], Err)},
- Fun = #c_fun{vars=[Xs],
- body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
- L = #c_var{name='L'},
- expr(#c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]},
- body=#c_letrec{defs=[{Loop,Fun}],
- body=#c_apply{anno=Anno, op=Loop, args=[L]}}},
- Sub);
-call_1(#c_call{anno=Anno}, lists, flatmap, [Arg1,Arg2], Sub) ->
- Loop = #c_var{name={'lists^flatmap',1}},
- F = #c_var{name='F'},
- Xs = #c_var{name='Xs'},
- X = #c_var{name='X'},
- H = #c_var{name='H'},
- C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
- body=#c_let{vars=[H],
- arg=#c_apply{anno=Anno, op=F, args=[X]},
- body=#c_call{anno=[compiler_generated|Anno],
- module=#c_literal{val=erlang},
- name=#c_literal{val='++'},
- args=[H,
- #c_apply{anno=Anno,
- op=Loop,
- args=[Xs]}]}}},
- C2 = #c_clause{pats=[#c_literal{val=[]}],
- guard=#c_call{module=#c_literal{val=erlang},
- name=#c_literal{val=is_function},
- args=[F, #c_literal{val=1}]},
- body=#c_literal{val=[]}},
- Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]},
- C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
- body=match_fail([{function_name,{'lists^flatmap',1}}|Anno], Err)},
- Fun = #c_fun{vars=[Xs],
- body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
- L = #c_var{name='L'},
- expr(#c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]},
- body=#c_letrec{defs=[{Loop,Fun}],
- body=#c_apply{anno=Anno, op=Loop, args=[L]}}},
- Sub);
-call_1(#c_call{anno=Anno}, lists, filter, [Arg1,Arg2], Sub) ->
- Loop = #c_var{name={'lists^filter',1}},
- F = #c_var{name='F'},
- Xs = #c_var{name='Xs'},
- X = #c_var{name='X'},
- B = #c_var{name='B'},
- Err1 = #c_tuple{es=[#c_literal{val='case_clause'}, X]},
- CC1 = #c_clause{pats=[#c_literal{val=true}], guard=#c_literal{val=true},
- body=#c_cons{anno=[compiler_generated], hd=X, tl=Xs}},
- CC2 = #c_clause{pats=[#c_literal{val=false}], guard=#c_literal{val=true},
- body=Xs},
- CC3 = #c_clause{pats=[X], guard=#c_literal{val=true},
- body=match_fail(Anno, Err1)},
- Case = #c_case{arg=B, clauses = [CC1, CC2, CC3]},
- C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
- body=#c_let{vars=[B],
- arg=#c_apply{anno=Anno, op=F, args=[X]},
- body=#c_let{vars=[Xs],
- arg=#c_apply{anno=Anno,
- op=Loop,
- args=[Xs]},
- body=Case}}},
- C2 = #c_clause{pats=[#c_literal{val=[]}],
- guard=#c_call{module=#c_literal{val=erlang},
- name=#c_literal{val=is_function},
- args=[F, #c_literal{val=1}]},
- body=#c_literal{val=[]}},
- Err2 = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]},
- C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
- body=match_fail([{function_name,{'lists^filter',1}}|Anno], Err2)},
- Fun = #c_fun{vars=[Xs],
- body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
- L = #c_var{name='L'},
- expr(#c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]},
- body=#c_letrec{defs=[{Loop,Fun}],
- body=#c_apply{anno=Anno, op=Loop, args=[L]}}},
- Sub);
-call_1(#c_call{anno=Anno}, lists, foldl, [Arg1,Arg2,Arg3], Sub) ->
- Loop = #c_var{name={'lists^foldl',2}},
- F = #c_var{name='F'},
- Xs = #c_var{name='Xs'},
- X = #c_var{name='X'},
- A = #c_var{name='A'},
- C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
- body=#c_apply{anno=Anno,
- op=Loop,
- args=[Xs, #c_apply{anno=Anno,
- op=F,
- args=[X, A]}]}},
- C2 = #c_clause{pats=[#c_literal{val=[]}],
- guard=#c_call{module=#c_literal{val=erlang},
- name=#c_literal{val=is_function},
- args=[F, #c_literal{val=2}]},
- body=A},
- Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, A, Xs]},
- C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
- body=match_fail([{function_name,{'lists^foldl',2}}|Anno], Err)},
- Fun = #c_fun{vars=[Xs, A],
- body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
- L = #c_var{name='L'},
- expr(#c_let{vars=[F, A, L], arg=#c_values{es=[Arg1, Arg2, Arg3]},
- body=#c_letrec{defs=[{Loop,Fun}],
- body=#c_apply{anno=Anno, op=Loop, args=[L, A]}}},
- Sub);
-call_1(#c_call{anno=Anno}, lists, foldr, [Arg1,Arg2,Arg3], Sub) ->
- Loop = #c_var{name={'lists^foldr',2}},
- F = #c_var{name='F'},
- Xs = #c_var{name='Xs'},
- X = #c_var{name='X'},
- A = #c_var{name='A'},
- C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
- body=#c_apply{anno=Anno,
- op=F,
- args=[X, #c_apply{anno=Anno,
- op=Loop,
- args=[Xs, A]}]}},
- C2 = #c_clause{pats=[#c_literal{val=[]}],
- guard=#c_call{module=#c_literal{val=erlang},
- name=#c_literal{val=is_function},
- args=[F, #c_literal{val=2}]},
- body=A},
- Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, A, Xs]},
- C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
- body=match_fail([{function_name,{'lists^foldr',2}}|Anno], Err)},
- Fun = #c_fun{vars=[Xs, A],
- body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
- L = #c_var{name='L'},
- expr(#c_let{vars=[F, A, L], arg=#c_values{es=[Arg1, Arg2, Arg3]},
- body=#c_letrec{defs=[{Loop,Fun}],
- body=#c_apply{anno=Anno, op=Loop, args=[L, A]}}},
- Sub);
-call_1(#c_call{anno=Anno}, lists, mapfoldl, [Arg1,Arg2,Arg3], Sub) ->
- Loop = #c_var{name={'lists^mapfoldl',2}},
- F = #c_var{name='F'},
- Xs = #c_var{name='Xs'},
- X = #c_var{name='X'},
- Avar = #c_var{name='A'},
- Match =
- fun (A, P, E) ->
- C1 = #c_clause{pats=[P], guard=#c_literal{val=true}, body=E},
- Err = #c_tuple{es=[#c_literal{val='badmatch'}, X]},
- C2 = #c_clause{pats=[X], guard=#c_literal{val=true},
- body=match_fail(Anno, Err)},
- #c_case{arg=A, clauses=[C1, C2]}
- end,
- C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
- body=Match(#c_apply{anno=Anno, op=F, args=[X, Avar]},
- #c_tuple{es=[X, Avar]},
-%%% Tuple passing version
- Match(#c_apply{anno=Anno,
- op=Loop,
- args=[Xs, Avar]},
- #c_tuple{es=[Xs, Avar]},
- #c_tuple{anno=[compiler_generated],
- es=[#c_cons{anno=[compiler_generated],
- hd=X, tl=Xs},
- Avar]})
-%%% Multiple-value version
-%%% #c_let{vars=[Xs,A],
-%%% %% The tuple here will be optimised
-%%% %% away later; no worries.
-%%% arg=#c_apply{op=Loop, args=[Xs, A]},
-%%% body=#c_values{es=[#c_cons{hd=X, tl=Xs},
-%%% A]}}
- )},
- C2 = #c_clause{pats=[#c_literal{val=[]}],
- guard=#c_call{module=#c_literal{val=erlang},
- name=#c_literal{val=is_function},
- args=[F, #c_literal{val=2}]},
-%%% Tuple passing version
- body=#c_tuple{anno=[compiler_generated],
- es=[#c_literal{val=[]}, Avar]}},
-%%% Multiple-value version
-%%% body=#c_values{es=[#c_literal{val=[]}, A]}},
- Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Avar, Xs]},
- C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
- body=match_fail([{function_name,{'lists^mapfoldl',2}}|Anno], Err)},
- Fun = #c_fun{vars=[Xs, Avar],
- body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
- L = #c_var{name='L'},
- expr(#c_let{vars=[F, Avar, L], arg=#c_values{es=[Arg1, Arg2, Arg3]},
- body=#c_letrec{defs=[{Loop,Fun}],
-%%% Tuple passing version
- body=#c_apply{anno=Anno,
- op=Loop,
- args=[L, Avar]}}},
-%%% Multiple-value version
-%%% body=#c_let{vars=[Xs, A],
-%%% arg=#c_apply{op=Loop,
-%%% args=[L, A]},
-%%% body=#c_tuple{es=[Xs, A]}}}},
- Sub);
-call_1(#c_call{anno=Anno}, lists, mapfoldr, [Arg1,Arg2,Arg3], Sub) ->
- Loop = #c_var{name={'lists^mapfoldr',2}},
- F = #c_var{name='F'},
- Xs = #c_var{name='Xs'},
- X = #c_var{name='X'},
- Avar = #c_var{name='A'},
- Match =
- fun (A, P, E) ->
- C1 = #c_clause{pats=[P], guard=#c_literal{val=true}, body=E},
- Err = #c_tuple{es=[#c_literal{val='badmatch'}, X]},
- C2 = #c_clause{pats=[X], guard=#c_literal{val=true},
- body=match_fail(Anno, Err)},
- #c_case{arg=A, clauses=[C1, C2]}
- end,
- C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
-%%% Tuple passing version
- body=Match(#c_apply{anno=Anno,
- op=Loop,
- args=[Xs, Avar]},
- #c_tuple{es=[Xs, Avar]},
- Match(#c_apply{anno=Anno, op=F, args=[X, Avar]},
- #c_tuple{es=[X, Avar]},
- #c_tuple{anno=[compiler_generated],
- es=[#c_cons{anno=[compiler_generated],
- hd=X, tl=Xs}, Avar]}))
-%%% Multiple-value version
-%%% body=#c_let{vars=[Xs,A],
-%%% %% The tuple will be optimised away
-%%% arg=#c_apply{op=Loop, args=[Xs, A]},
-%%% body=Match(#c_apply{op=F, args=[X, A]},
-%%% #c_tuple{es=[X, A]},
-%%% #c_values{es=[#c_cons{hd=X, tl=Xs},
-%%% A]})}
- },
- C2 = #c_clause{pats=[#c_literal{val=[]}],
- guard=#c_call{module=#c_literal{val=erlang},
- name=#c_literal{val=is_function},
- args=[F, #c_literal{val=2}]},
-%%% Tuple passing version
- body=#c_tuple{anno=[compiler_generated],
- es=[#c_literal{val=[]}, Avar]}},
-%%% Multiple-value version
-%%% body=#c_values{es=[#c_literal{val=[]}, A]}},
- Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Avar, Xs]},
- C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
- body=match_fail([{function_name,{'lists^mapfoldr',2}}|Anno], Err)},
- Fun = #c_fun{vars=[Xs, Avar],
- body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
- L = #c_var{name='L'},
- expr(#c_let{vars=[F, Avar, L], arg=#c_values{es=[Arg1, Arg2, Arg3]},
- body=#c_letrec{defs=[{Loop,Fun}],
-%%% Tuple passing version
- body=#c_apply{anno=Anno,
- op=Loop,
- args=[L, Avar]}}},
-%%% Multiple-value version
-%%% body=#c_let{vars=[Xs, A],
-%%% arg=#c_apply{op=Loop,
-%%% args=[L, A]},
-%%% body=#c_tuple{es=[Xs, A]}}}},
- Sub);
-call_1(#c_call{module=M, name=N}=Call, _, _, As, Sub) ->
- call_0(Call, M, N, As, Sub).
-
-match_fail(Anno, Arg) ->
- #c_primop{anno=Anno,
- name=#c_literal{val='match_fail'},
- args=[Arg]}.
-
%% fold_call(Call, Mod, Name, Args, Sub) -> Expr.
%% Try to safely evaluate the call. Just try to evaluate arguments,
%% do the call and convert return values to literals. If this
@@ -1133,29 +756,33 @@ fold_call_1(Call, Mod, Name, Args, Sub) ->
true -> fold_call_2(Call, Mod, Name, Args, Sub)
end.
-fold_call_2(Call, Module, Name, Args0, Sub) ->
- try
- Args = [core_lib:literal_value(A) || A <- Args0],
- try apply(Module, Name, Args) of
- Val ->
- case cerl:is_literal_term(Val) of
- true ->
- #c_literal{val=Val};
- false ->
- %% Successful evaluation, but it was not
- %% possible to express the computed value as a literal.
- Call
- end
- catch
- error:Reason ->
- %% Evaluation of the function failed. Warn and replace
- %% the call with a call to erlang:error/1.
- eval_failure(Call, Reason)
- end
+fold_call_2(Call, Module, Name, Args, Sub) ->
+ case all(fun cerl:is_literal/1, Args) of
+ true ->
+ %% All arguments are literals.
+ fold_lit_args(Call, Module, Name, Args);
+ false ->
+ %% At least one non-literal argument.
+ fold_non_lit_args(Call, Module, Name, Args, Sub)
+ end.
+
+fold_lit_args(Call, Module, Name, Args0) ->
+ Args = [cerl:concrete(A) || A <- Args0],
+ try apply(Module, Name, Args) of
+ Val ->
+ case cerl:is_literal_term(Val) of
+ true ->
+ cerl:abstract(Val);
+ false ->
+ %% Successful evaluation, but it was not possible
+ %% to express the computed value as a literal.
+ Call
+ end
catch
- error:_ ->
- %% There was at least one non-literal argument.
- fold_non_lit_args(Call, Module, Name, Args0, Sub)
+ error:Reason ->
+ %% Evaluation of the function failed. Warn and replace
+ %% the call with a call to erlang:error/1.
+ eval_failure(Call, Reason)
end.
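%% A sketch (not from the patch) of the folding performed by fold_lit_args/4
%% above for a pure BIF with all-literal arguments; the function name is made
%% up for illustration:

fold_lit_args_sketch() ->
    Args = [cerl:abstract(1), cerl:abstract(2)],   %all arguments are literals
    Vals = [cerl:concrete(A) || A <- Args],        %[1,2]
    Val = apply(erlang, '+', Vals),                %3
    true = cerl:is_literal_term(Val),
    cerl:abstract(Val).                            %#c_literal{val=3}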
%% fold_non_lit_args(Call, Module, Name, Args, Sub) -> Expr.
@@ -1194,17 +821,18 @@ fold_non_lit_args(Call, _, _, _, _) -> Call.
%% Evaluate a relational operation using type information.
eval_rel_op(Call, Op, [#c_var{name=V},#c_var{name=V}], _) ->
Bool = erlang:Op(same, same),
- #c_literal{anno=core_lib:get_anno(Call),val=Bool};
-eval_rel_op(Call, '=:=', [#c_var{name=V}=Var,#c_literal{val=true}], Sub) ->
+ #c_literal{anno=cerl:get_ann(Call),val=Bool};
+eval_rel_op(Call, '=:=', [Term,#c_literal{val=true}], Sub) ->
%% BoolVar =:= true ==> BoolVar
- case is_boolean_type(V, Sub) of
- true -> Var;
- false -> Call
+ case is_boolean_type(Term, Sub) of
+ yes -> Term;
+ maybe -> Call;
+ no -> #c_literal{val=false}
end;
eval_rel_op(Call, '==', Ops, _Sub) ->
case is_exact_eq_ok(Ops) of
true ->
- Name = #c_literal{anno=core_lib:get_anno(Call),val='=:='},
+ Name = #c_literal{anno=cerl:get_ann(Call),val='=:='},
Call#c_call{name=Name};
false ->
Call
@@ -1212,7 +840,7 @@ eval_rel_op(Call, '==', Ops, _Sub) ->
eval_rel_op(Call, '/=', Ops, _Sub) ->
case is_exact_eq_ok(Ops) of
true ->
- Name = #c_literal{anno=core_lib:get_anno(Call),val='=/='},
+ Name = #c_literal{anno=cerl:get_ann(Call),val='=/='},
Call#c_call{name=Name};
false ->
Call
@@ -1229,6 +857,11 @@ is_non_numeric([H|T]) ->
is_non_numeric(H) andalso is_non_numeric(T);
is_non_numeric(Tuple) when is_tuple(Tuple) ->
is_non_numeric_tuple(Tuple, tuple_size(Tuple));
+is_non_numeric(Map) when is_map(Map) ->
+ %% Note that 17.x and 18.x compare keys in different ways.
+ %% Be very conservative -- require that both keys and values
+ %% are non-numeric.
+ is_non_numeric(maps:to_list(Map));
is_non_numeric(Num) when is_number(Num) ->
false;
is_non_numeric(_) -> true.
@@ -1242,40 +875,31 @@ is_non_numeric_tuple(_Tuple, 0) -> true.
%% there must be at least one non-literal argument (i.e.
%% there is no need to handle the case that all argments
%% are literal).
-eval_bool_op(Call, 'and', [#c_literal{val=true},#c_var{name=V}=Res], Sub) ->
- case is_boolean_type(V, Sub) of
- true -> Res;
- false-> Call
- end;
-eval_bool_op(Call, 'and', [#c_var{name=V}=Res,#c_literal{val=true}], Sub) ->
- case is_boolean_type(V, Sub) of
- true -> Res;
- false-> Call
- end;
-eval_bool_op(Call, 'and', [#c_literal{val=false}=Res,#c_var{name=V}], Sub) ->
- case is_boolean_type(V, Sub) of
- true -> Res;
- false-> Call
- end;
-eval_bool_op(Call, 'and', [#c_var{name=V},#c_literal{val=false}=Res], Sub) ->
- case is_boolean_type(V, Sub) of
- true -> Res;
- false-> Call
- end;
+
+eval_bool_op(Call, 'and', [#c_literal{val=true},Term], Sub) ->
+ eval_bool_op_1(Call, Term, Term, Sub);
+eval_bool_op(Call, 'and', [Term,#c_literal{val=true}], Sub) ->
+ eval_bool_op_1(Call, Term, Term, Sub);
+eval_bool_op(Call, 'and', [#c_literal{val=false}=Res,Term], Sub) ->
+ eval_bool_op_1(Call, Res, Term, Sub);
+eval_bool_op(Call, 'and', [Term,#c_literal{val=false}=Res], Sub) ->
+ eval_bool_op_1(Call, Res, Term, Sub);
eval_bool_op(Call, _, _, _) -> Call.
+eval_bool_op_1(Call, Res, Term, Sub) ->
+ case is_boolean_type(Term, Sub) of
+ yes -> Res;
+ no -> eval_failure(Call, badarg);
+ maybe -> Call
+ end.
+
%% Evaluate is_boolean/1 using type information.
-eval_is_boolean(Call, #c_var{name=V}, Sub) ->
- case is_boolean_type(V, Sub) of
- true -> #c_literal{val=true};
- false -> Call
- end;
-eval_is_boolean(_, #c_cons{}, _) ->
- #c_literal{val=false};
-eval_is_boolean(_, #c_tuple{}, _) ->
- #c_literal{val=false};
-eval_is_boolean(Call, _, _) ->
- Call.
+eval_is_boolean(Call, Term, Sub) ->
+ case is_boolean_type(Term, Sub) of
+ no -> #c_literal{val=false};
+ yes -> #c_literal{val=true};
+ maybe -> Call
+ end.
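%% A hedged summary of the three-valued interface assumed above:
%% is_boolean_type/2 (assumed to be defined elsewhere in this module) now
%% answers 'yes' | 'no' | 'maybe' instead of a boolean, so eval_is_boolean/3
%% folds is_boolean(Term) to true when the type is known to be boolean, to
%% false when it cannot be, and leaves the call untouched otherwise;
%% eval_bool_op_1/4 additionally turns a known non-boolean operand of 'and'
%% into a badarg failure.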
%% eval_length(Call, List) -> Val.
%% Evaluates the length for the prefix of List which has a known
@@ -1325,33 +949,33 @@ eval_append(Call, X, Y) ->
%% Evaluates element/2 if the position Pos is a literal and
%% the shape of the tuple Tuple is known.
%%
-eval_element(Call, #c_literal{val=Pos}, #c_tuple{es=Es}, _Types) when is_integer(Pos) ->
- if
- 1 =< Pos, Pos =< length(Es) ->
- lists:nth(Pos, Es);
- true ->
- eval_failure(Call, badarg)
- end;
-eval_element(Call, #c_literal{val=Pos}, #c_var{name=V}, Types)
+eval_element(Call, #c_literal{val=Pos}, Tuple, Types)
when is_integer(Pos) ->
- case orddict:find(V, Types#sub.t) of
- {ok,#c_tuple{es=Elements}} ->
+ case get_type(Tuple, Types) of
+ none ->
+ Call;
+ Type ->
+ Es = case cerl:is_c_tuple(Type) of
+ false -> [];
+ true -> cerl:tuple_es(Type)
+ end,
if
- 1 =< Pos, Pos =< length(Elements) ->
- case lists:nth(Pos, Elements) of
- #c_alias{var=Alias} -> Alias;
- Res -> Res
+ 1 =< Pos, Pos =< length(Es) ->
+ El = lists:nth(Pos, Es),
+ try
+ pat_to_expr(El)
+ catch
+ throw:impossible ->
+ Call
end;
true ->
+ %% Index outside tuple or not a tuple.
eval_failure(Call, badarg)
- end;
- {ok,_} ->
- eval_failure(Call, badarg);
- error ->
- Call
+ end
end;
-eval_element(Call, Pos, Tuple, _Types) ->
- case is_not_integer(Pos) orelse is_not_tuple(Tuple) of
+eval_element(Call, Pos, Tuple, Sub) ->
+ case is_int_type(Pos, Sub) =:= no orelse
+ is_tuple_type(Tuple, Sub) =:= no of
true ->
eval_failure(Call, badarg);
false ->
@@ -1361,34 +985,27 @@ eval_element(Call, Pos, Tuple, _Types) ->
%% eval_is_record(Call, Var, Tag, Size, Types) -> Val.
%% Evaluates is_record/3 using type information.
%%
-eval_is_record(Call, #c_var{name=V}, #c_literal{val=NeededTag}=Lit,
+eval_is_record(Call, Term, #c_literal{val=NeededTag},
#c_literal{val=Size}, Types) ->
- case orddict:find(V, Types#sub.t) of
- {ok,#c_tuple{es=[#c_literal{val=Tag}|_]=Es}} ->
- Lit#c_literal{val=Tag =:= NeededTag andalso
- length(Es) =:= Size};
- _ ->
- Call
+ case get_type(Term, Types) of
+ none ->
+ Call;
+ Type ->
+ Es = case cerl:is_c_tuple(Type) of
+ false -> [];
+ true -> cerl:tuple_es(Type)
+ end,
+ case Es of
+ [#c_literal{val=Tag}|_] ->
+ Bool = Tag =:= NeededTag andalso
+ length(Es) =:= Size,
+ #c_literal{val=Bool};
+ _ ->
+ #c_literal{val=false}
+ end
end;
eval_is_record(Call, _, _, _, _) -> Call.
-%% is_not_integer(Core) -> true | false.
-%% Returns true if Core is definitely not an integer.
-
-is_not_integer(#c_literal{val=Val}) when not is_integer(Val) -> true;
-is_not_integer(#c_tuple{}) -> true;
-is_not_integer(#c_cons{}) -> true;
-is_not_integer(#c_map{}) -> true;
-is_not_integer(_) -> false.
-
-%% is_not_tuple(Core) -> true | false.
-%% Returns true if Core is definitely not a tuple.
-
-is_not_tuple(#c_literal{val=Val}) when not is_tuple(Val) -> true;
-is_not_tuple(#c_cons{}) -> true;
-is_not_tuple(#c_map{}) -> true;
-is_not_tuple(_) -> false.
-
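%% A hedged sketch of the source-level effect of the rewritten eval_element/4
%% and eval_is_record/5 above, which now consult the type database through
%% get_type/2 (not shown in this hunk) instead of only #c_var{} arguments.
%% Assuming T was previously matched as {a,B}:
%%
%%     element(2, T)        %folds to B (via pat_to_expr/1)
%%     element(4, T)        %known to fail; replaced by a badarg failure
%%     is_record(T, a, 2)   %folds to 'true'
%%     is_record(T, b, 2)   %folds to 'false'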
%% eval_setelement(Call, Pos, Tuple, NewVal) -> Core.
%% Evaluates setelement/3 if position Pos is an integer
%% the shape of the tuple Tuple is known.
@@ -1492,7 +1109,7 @@ clause(#c_clause{pats=Ps0,guard=G0,body=B0}=Cl, Cexpr, Ctxt, Sub0) ->
let_substs(Vs0, As0, Sub0) ->
{Vs1,Sub1} = pattern_list(Vs0, Sub0),
{Vs2,As1,Ss} = let_substs_1(Vs1, As0, Sub1),
- Sub2 = scope_add([V || #c_var{name=V} <- Vs2], Sub1),
+ Sub2 = sub_add_scope([V || #c_var{name=V} <- Vs2], Sub1),
{Vs2,As1,
foldl(fun ({V,S}, Sub) -> sub_set_name(V, S, Sub) end, Sub2, Ss)}.
@@ -1527,7 +1144,7 @@ pattern(#c_var{}=Pat, Isub, Osub) ->
true ->
V1 = make_var_name(),
Pat1 = #c_var{name=V1},
- {Pat1,sub_set_var(Pat, Pat1, scope_add([V1], Osub))};
+ {Pat1,sub_set_var(Pat, Pat1, sub_add_scope([V1], Osub))};
false ->
{Pat,sub_del_var(Pat, Osub)}
end;
@@ -1597,6 +1214,7 @@ is_subst(_) -> false.
%% sub_del_var(Var, #sub{}) -> #sub{}.
%% sub_subst_var(Var, Value, #sub{}) -> [{Name,Value}].
%% sub_is_val(Var, #sub{}) -> boolean().
+%% sub_add_scope([Var], #sub{}) -> #sub{}
%% sub_subst_scope(#sub{}) -> #sub{}
%%
%% We use the variable name as key so as not have problems with
@@ -1607,9 +1225,10 @@ is_subst(_) -> false.
%% In addition to the list of substitutions, we also keep track of
%% all variable currently live (the scope).
%%
-%% sub_subst_scope/1 adds dummy substitutions for all variables
-%% in the scope in order to force renaming if variables in the
-%% scope occurs as pattern variables.
+%% sub_add_scope/2 adds variables to the scope. sub_subst_scope/1
+%% adds dummy substitutions for all variables in the scope in order
+%% to force renaming if variables in the scope occur as pattern
+%% variables.
sub_new() -> #sub{v=orddict:new(),s=gb_trees:empty(),t=[]}.
@@ -1649,6 +1268,12 @@ sub_subst_var(#c_var{name=V}, Val, #sub{v=S0}) ->
%% Fold chained substitutions.
[{V,Val}] ++ [ {K,Val} || {K,#c_var{name=V1}} <- S0, V1 =:= V].
+sub_add_scope(Vs, #sub{s=Scope0}=Sub) ->
+ Scope = foldl(fun(V, S) when is_integer(V); is_atom(V) ->
+ gb_sets:add(V, S)
+ end, Scope0, Vs),
+ Sub#sub{s=Scope}.
+
sub_subst_scope(#sub{v=S0,s=Scope}=Sub) ->
S = [{-1,#c_var{name=Sv}} || Sv <- gb_sets:to_list(Scope)]++S0,
Sub#sub{v=S}.
@@ -1696,7 +1321,7 @@ clauses(E, [C0|Cs], Ctxt, Sub, LitExpr) ->
{yes,yes} ->
case LitExpr of
false ->
- Line = get_line(core_lib:get_anno(C1)),
+ Line = get_line(cerl:get_ann(C1)),
shadow_warning(Cs, Line);
true ->
%% If the case expression is a literal,
@@ -1930,7 +1555,7 @@ opt_bool_case_guard(#c_case{arg=Arg,clauses=Cs0}=Case) ->
Case;
true ->
Cs = opt_bool_case_guard(Arg, Cs0),
- Case#c_case{arg=#c_values{anno=core_lib:get_anno(Arg),es=[]},
+ Case#c_case{arg=#c_values{anno=cerl:get_ann(Arg),es=[]},
clauses=Cs}
end.
@@ -2041,182 +1666,259 @@ case_opt_args([], Cs, _Sub, _LitExpr, Acc) ->
%% or to remove a literal argument.
%%
case_opt_arg(E0, Sub, Cs, LitExpr) ->
- E = maybe_replace_var(E0, Sub),
- case cerl:is_data(E) of
+ case cerl:is_c_var(E0) of
false ->
- {error,Cs};
+ case_opt_arg_1(E0, Cs, LitExpr);
true ->
+ case case_will_var_match(Cs) of
+ true ->
+ %% All clauses will match a variable in the
+ %% current position. Don't expand this variable
+ %% (that can only make the code worse).
+ {error,Cs};
+ false ->
+ %% If possible, expand this variable to a previously
+ %% matched term.
+ E = case_expand_var(E0, Sub),
+ case_opt_arg_1(E, Cs, LitExpr)
+ end
+ end.
+
+case_opt_arg_1(E0, Cs0, LitExpr) ->
+ case cerl:is_data(E0) of
+ false ->
+ {error,Cs0};
+ true ->
+ E = case_opt_compiler_generated(E0),
+ Cs = case_opt_nomatch(E, Cs0, LitExpr),
case cerl:data_type(E) of
{atomic,_} ->
- case_opt_lit(E, Cs, LitExpr);
+ case_opt_lit(E, Cs);
_ ->
- case_opt_data(E, Cs, LitExpr)
+ case_opt_data(E, Cs)
end
end.
-%% maybe_replace_var(Expr0, Sub) -> Expr
+%% case_will_var_match([Clause]) -> true | false.
+%% Return true if all clauses will match a variable in the
+%% current position.
+%%
+case_will_var_match(Cs) ->
+ all(fun({[P|_],_,_,_}) ->
+ case cerl_clauses:match(P, any) of
+ {true,_} -> true;
+ _ -> false
+ end
+ end, Cs).
+
+
+%% case_opt_compiler_generated(Core) -> Core'
+%% Mark Core expressions as compiler generated to ensure that
+%% no warnings are generated if they turn out to be unused.
+%% To make pretty-printed Core Erlang easier to read, don't mark
+%% constructs that can't cause warnings to be emitted.
+%%
+case_opt_compiler_generated(Core) ->
+ F = fun(C) ->
+ case cerl:type(C) of
+ alias -> C;
+ var -> C;
+ _ -> cerl:set_ann(C, [compiler_generated])
+ end
+ end,
+ cerl_trees:map(F, Core).
+
+
+%% case_expand_var(Expr0, Sub) -> Expr
%% If Expr0 is a variable that has been previously matched and
%% is known to be a tuple, return the tuple instead. Otherwise
%% return Expr0 unchanged.
%%
-maybe_replace_var(E, Sub) ->
- case cerl:is_c_var(E) of
- false -> E;
- true -> maybe_replace_var_1(E, Sub)
- end.
-
-maybe_replace_var_1(E, #sub{t=Tdb}) ->
+case_expand_var(E, #sub{t=Tdb}) ->
case orddict:find(cerl:var_name(E), Tdb) of
{ok,T0} ->
case cerl:is_c_tuple(T0) of
false ->
E;
true ->
- cerl_trees:map(fun(C) ->
- case cerl:is_c_alias(C) of
- false -> C;
- true -> cerl:alias_pat(C)
- end
- end, T0)
+ %% The pattern was a tuple. Now we must make sure
+ %% that the elements of the tuple are suitable. In
+ %% particular, we don't want binary or map
+ %% construction here, since that means that the
+ %% binary or map will be constructed in the 'case'
+ %% argument. That is wasteful for binaries. Even
+ %% worse is that any map pattern that uses the ':='
+ %% operator will fail when used in map
+ %% construction (only the '=>' operator is allowed
+ %% when constructing a map from scratch).
+ try
+ cerl_trees:map(fun coerce_to_data/1, T0)
+ catch
+ throw:impossible ->
+ %% Something unsuitable was found (a map
+ %% or a binary). Keep the variable.
+ E
+ end
end;
error ->
E
end.
-%% case_opt_lit(Literal, Clauses0, LitExpr) ->
-%% {ok,[],Clauses} | error
-%% The current part of the case expression is a literal. That
-%% means that we will know at compile-time whether a clause
-%% will match, and we can remove the corresponding pattern from
-%% each clause.
-%%
-%% The only complication is if the literal is a binary. Binary
-%% pattern matching is tricky, so we will give up in that case.
+%% coerce_to_data(Core) -> Core'
+%% Coerce an element originally from a pattern to a data item or
+%% variable. Throw an 'impossible' exception if non-data Core Erlang
+%% terms such as binary construction or map construction are
+%% encountered.
-case_opt_lit(Lit, Cs0, LitExpr) ->
- Cs1 = case_opt_lit_1(Lit, Cs0, LitExpr),
- try case_opt_lit_2(Lit, Cs1) of
- Cs ->
- {ok,[],Cs}
- catch
- throw:impossible ->
- {error,Cs1}
+coerce_to_data(C) ->
+ case cerl:is_c_alias(C) of
+ false ->
+ case cerl:is_data(C) orelse cerl:is_c_var(C) of
+ true -> C;
+ false -> throw(impossible)
+ end;
+ true ->
+ coerce_to_data(cerl:alias_pat(C))
end.
-case_opt_lit_1(E, [{[P|_],C,_,_}=Current|Cs], LitExpr) ->
+%% case_opt_nomatch(E, Clauses, LitExpr) -> Clauses'
+%% Remove all clauses that cannot possibly match.
+
+case_opt_nomatch(E, [{[P|_],C,_,_}=Current|Cs], LitExpr) ->
case cerl_clauses:match(P, E) of
none ->
- %% The pattern will not match the literal. Remove the clause.
- %% Unless the entire case expression is a literal, also
- %% emit a warning.
+ %% The pattern will not match the case expression. Remove
+ %% the clause. Unless the entire case expression is a
+ %% literal, also emit a warning.
case LitExpr of
false -> add_warning(C, nomatch_clause_type);
true -> ok
end,
- case_opt_lit_1(E, Cs, LitExpr);
+ case_opt_nomatch(E, Cs, LitExpr);
_ ->
- [Current|case_opt_lit_1(E, Cs, LitExpr)]
+ [Current|case_opt_nomatch(E, Cs, LitExpr)]
end;
-case_opt_lit_1(_, [], _) -> [].
+case_opt_nomatch(_, [], _) -> [].
+
+%% case_opt_lit(Literal, Clauses0) -> {ok,[],Clauses} | error
+%% The current part of the case expression is a literal. That
+%% means that we will know at compile-time whether a clause
+%% will match, and we can remove the corresponding pattern from
+%% each clause.
+%%
+%% The only complication is if the literal is a binary or map.
+%% In general, it is difficult to know whether a binary or
+%% map pattern will match, so we give up in that case.
+
+case_opt_lit(Lit, Cs0) ->
+ try case_opt_lit_1(Lit, Cs0) of
+ Cs ->
+ {ok,[],Cs}
+ catch
+ throw:impossible ->
+ {error,Cs0}
+ end.
-case_opt_lit_2(E, [{[P|Ps],C,PsAcc,Bs0}|Cs]) ->
- %% Non-matching clauses have already been removed in case_opt_lit_1/3.
+case_opt_lit_1(E, [{[P|Ps],C,PsAcc,Bs0}|Cs]) ->
+ %% Non-matching clauses have already been removed
+ %% in case_opt_nomatch/3.
case cerl_clauses:match(P, E) of
{true,Bs} ->
%% The pattern matches the literal. Remove the pattern
%% and update the bindings.
- [{Ps,C,PsAcc,Bs++Bs0}|case_opt_lit_2(E, Cs)];
+ [{Ps,C,PsAcc,Bs++Bs0}|case_opt_lit_1(E, Cs)];
{false,_} ->
%% Binary literal and pattern. We are not sure whether
%% the pattern will match.
throw(impossible)
end;
-case_opt_lit_2(_, []) -> [].
+case_opt_lit_1(_, []) -> [].
%% case_opt_data(Expr, Clauses0, LitExpr) -> {ok,Exprs,Clauses}
+%% The case expression is a non-atomic data constructor (cons
+%% or tuple). We can know at compile time whether each clause
+%% will match, and we can delay the building of the data until
+%% the clauses where it is actually needed.
-case_opt_data(E, Cs0, LitExpr) ->
+case_opt_data(E, Cs0) ->
Es = cerl:data_es(E),
- Cs = case_opt_data_1(Cs0, Es,
- {cerl:data_type(E),cerl:data_arity(E)},
- LitExpr),
- {ok,Es,Cs}.
-
-case_opt_data_1([{[P|Ps0],C,PsAcc,Bs0}|Cs], Es, TypeSig, LitExpr) ->
- case case_data_pat(P, TypeSig) of
- {ok,Ps1,Bs1} ->
- [{Ps1++Ps0,C,PsAcc,Bs1++Bs0}|
- case_opt_data_1(Cs, Es, TypeSig,LitExpr)];
- error ->
- case LitExpr of
- false -> add_warning(C, nomatch_clause_type);
- true -> ok
- end,
- case_opt_data_1(Cs, Es, TypeSig, LitExpr)
- end;
-case_opt_data_1([], _, _, _) -> [].
-
-%% case_data_pat(Pattern, Type, Arity) -> {ok,[Pattern],[{AliasVar,Pat}]} | error.
-
-case_data_pat(P, TypeSig) ->
- case cerl:is_data(P) of
- false ->
- case_data_pat_var(P, TypeSig);
- true ->
- case {cerl:data_type(P),cerl:data_arity(P)} of
- TypeSig ->
- {ok,cerl:data_es(P),[]};
- {_,_} ->
- error
- end
+ TypeSig = {cerl:data_type(E),cerl:data_arity(E)},
+ try case_opt_data_1(Cs0, Es, TypeSig) of
+ Cs ->
+ {ok,Es,Cs}
+ catch
+ throw:impossible ->
+ %% The pattern contained a binary or map.
+ {error,Cs0}
end.
-%% case_data_pat_var(Pattern, {DataType,ArityType}) ->
-%% {ok,[Pattern],[{AliasVar,Pat}]}
+case_opt_data_1([{[P0|Ps0],C,PsAcc,Bs0}|Cs], Es, TypeSig) ->
+ P = case_opt_compiler_generated(P0),
+ BindTo = #c_var{name=dummy},
+ {Ps1,[{BindTo,_}|Bs1]} = case_data_pat_alias(P, BindTo, TypeSig, []),
+ [{Ps1++Ps0,C,PsAcc,Bs1++Bs0}|case_opt_data_1(Cs, Es, TypeSig)];
+case_opt_data_1([], _, _) -> [].
-case_data_pat_var(P, {Type,Arity}=TypeSig) ->
- %% If the entire case statement is evaluated in an effect
- %% context (e.g. "case {A,B} of ... end, ok"), there will
- %% be a warning that a term is constructed but never used.
- %% To avoid that warning, we must annotate the data
- %% constructor as compiler generated.
- Ann = [compiler_generated|cerl:get_ann(P)],
+case_data_pat_alias(P, BindTo0, TypeSig, Bs0) ->
case cerl:type(P) of
- var ->
- Vars = make_vars(cerl:get_ann(P), Arity),
- {ok,Vars,[{P,cerl:ann_make_data(Ann, Type, Vars)}]};
alias ->
- V = cerl:alias_var(P),
- Apat = cerl:alias_pat(P),
- case case_data_pat(Apat, TypeSig) of
- {ok,Ps,Bs} ->
- {ok,Ps,[{V,cerl:ann_make_data(Ann, Type, unalias_pat_list(Ps))}|Bs]};
- error ->
- error
- end;
+ %% Recursively handle the pattern and bind to
+ %% the alias variable.
+ BindTo = cerl:alias_var(P),
+ Apat0 = cerl:alias_pat(P),
+ Ann = [compiler_generated],
+ Apat = cerl:set_ann(Apat0, Ann),
+ {Ps,Bs} = case_data_pat_alias(Apat, BindTo, TypeSig, Bs0),
+ {Ps,[{BindTo0,BindTo}|Bs]};
+ var ->
+ %% Here we will need to actually build the data and bind
+ %% it to the variable.
+ {Type,Arity} = TypeSig,
+ Vars = make_vars([], Arity),
+ Ann = [compiler_generated],
+ Data = cerl:ann_make_data(Ann, Type, Vars),
+ Bs = [{BindTo0,P},{P,Data}|Bs0],
+ {Vars,Bs};
_ ->
- error
+ %% Since case_opt_nomatch/3 has removed all clauses that
+ %% cannot match, we KNOW that this clause must match and
+ %% that the pattern must be a data constructor.
+ %% Here we must build the data and bind it to the variable.
+ {Type,_} = TypeSig,
+ DataEs = cerl:data_es(P),
+ Vars = pat_to_expr_list(DataEs),
+ Ann = [compiler_generated],
+ Data = cerl:ann_make_data(Ann, Type, Vars),
+ {DataEs,[{BindTo0,Data}]}
end.
-%% unalias_pat(Pattern) -> Pattern.
-%% Remove all the aliases in a pattern but using the alias variables
-%% instead of the values. We KNOW they will be bound.
+%% pat_to_expr(Pattern) -> Expression.
+%% Convert a pattern to an expression if possible. We KNOW that
+%% all variables in the pattern will be bound.
+%%
+%% Throw an 'impossible' exception if a map or (non-literal)
+%% binary is encountered. Trying to use a map pattern as an
+%% expression is incorrect, while rebuilding a potentially
+%% huge binary in an expression would be wasteful.
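+%%
+%%  For example (a sketch): the pattern {A,B} becomes the expression
+%%  {A,B}, the alias pattern T = {A,B} becomes just T (which is known
+%%  to be bound), and a map or binary pattern throws 'impossible'.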
-unalias_pat(P) ->
- case cerl:is_c_alias(P) of
- true ->
+pat_to_expr(P) ->
+ case cerl:type(P) of
+ alias ->
cerl:alias_var(P);
- false ->
+ var ->
+ P;
+ _ ->
case cerl:is_data(P) of
false ->
- P;
+ %% Map or binary.
+ throw(impossible);
true ->
- Es = unalias_pat_list(cerl:data_es(P)),
+ Es = pat_to_expr_list(cerl:data_es(P)),
cerl:update_data(P, cerl:data_type(P), Es)
end
end.
-unalias_pat_list(Ps) -> [unalias_pat(P) || P <- Ps].
+pat_to_expr_list(Ps) -> [pat_to_expr(P) || P <- Ps].
make_vars(A, Max) ->
make_vars(A, 1, Max).
@@ -2234,18 +1936,11 @@ make_var_name() ->
list_to_atom("fol"++integer_to_list(N)).
letify(Bs, Body) ->
+ Ann = cerl:get_ann(Body),
foldr(fun({V,Val}, B) ->
- letify(V, Val, B)
+ cerl:ann_c_let(Ann, [V], Val, B)
end, Body, Bs).
-letify(#c_var{name=Vname}=Var, Val, Body) ->
- case core_lib:is_var_used(Vname, Body) of
- true ->
- A = element(2, Body),
- #c_let{anno=A,vars=[Var],arg=Val,body=Body};
- false -> Body
- end.
-
%% opt_case_in_let(LetExpr) -> LetExpr'
opt_case_in_let(#c_let{vars=Vs,arg=Arg,body=B}=Let, Sub) ->
@@ -2334,11 +2029,8 @@ is_bool_expr(#c_let{vars=[V],arg=Arg,body=B}, Sub0) ->
is_bool_expr(#c_let{body=B}, Sub) ->
%% Binding of multiple variables.
is_bool_expr(B, Sub);
-is_bool_expr(#c_literal{val=Bool}, _) when is_boolean(Bool) ->
- true;
-is_bool_expr(#c_var{name=V}, Sub) ->
- is_boolean_type(V, Sub);
-is_bool_expr(_, _) -> false.
+is_bool_expr(C, Sub) ->
+ is_boolean_type(C, Sub) =:= yes.
is_bool_expr_list([C|Cs], Sub) ->
is_bool_expr(C, Sub) andalso is_bool_expr_list(Cs, Sub);
@@ -2552,12 +2244,6 @@ move_let_into_expr(_Let, _Expr, _Sub) -> impossible.
is_failing_clause(#c_clause{body=B}) ->
will_fail(B).
-scope_add(Vs, #sub{s=Scope0}=Sub) ->
- Scope = foldl(fun(V, S) when is_integer(V); is_atom(V) ->
- gb_sets:add(V, S)
- end, Scope0, Vs),
- Sub#sub{s=Scope}.
-
%% opt_simple_let(#c_let{}, Context, Sub) -> CoreTerm
 %%  Optimize a let construct that does not contain any lets
%% in its argument.
@@ -2586,31 +2272,7 @@ opt_simple_let_1(#c_let{vars=Vs0,body=B0}=Let, Arg0, Ctxt, Sub0) ->
Arg = core_lib:make_values(Args),
opt_simple_let_2(Let, Vs, Arg, B, Ctxt, Sub1).
-opt_simple_let_2(Let0, Vs0, Arg0, Body0, effect, Sub) ->
- case {Vs0,Arg0,Body0} of
- {[],#c_values{es=[]},Body} ->
- %% No variables left (because of substitutions).
- Body;
- {[_|_],Arg,#c_literal{}} ->
- %% The body is a literal. That means that we can ignore
- %% it and that the return value is Arg revisited in
- %% effect context.
- body(Arg, effect, sub_new_preserve_types(Sub));
- {Vs,Arg,Body} ->
- %% Since we are in effect context, there is a chance
- %% that the body no longer references the variables.
- %% In that case we can construct a sequence and visit
- %% that in effect context:
- %% let <Var> = Arg in BodyWithoutVar ==> seq Arg BodyWithoutVar
- case is_any_var_used(Vs, Body) of
- false ->
- expr(#c_seq{arg=Arg,body=Body}, effect, sub_new_preserve_types(Sub));
- true ->
- Let = Let0#c_let{vars=Vs,arg=Arg,body=Body},
- opt_case_in_let_arg(opt_case_in_let(Let, Sub), effect, Sub)
- end
- end;
-opt_simple_let_2(Let, Vs0, Arg0, Body, value, Sub) ->
+opt_simple_let_2(Let0, Vs0, Arg0, Body, Ctxt, Sub) ->
case {Vs0,Arg0,Body} of
{[#c_var{name=N1}],Arg,#c_var{name=N2}} ->
case N1 =:= N2 of
@@ -2619,19 +2281,38 @@ opt_simple_let_2(Let, Vs0, Arg0, Body, value, Sub) ->
Arg;
false ->
%% let <Var> = Arg in <OtherVar> ==> seq Arg OtherVar
- expr(#c_seq{arg=Arg,body=Body}, value, sub_new_preserve_types(Sub))
+ expr(#c_seq{arg=Arg,body=Body}, Ctxt,
+ sub_new_preserve_types(Sub))
end;
{[],#c_values{es=[]},_} ->
%% No variables left.
Body;
{_,Arg,#c_literal{}} ->
- %% The variable is not used in the body. The argument
- %% can be evaluated in effect context to simplify it.
- expr(#c_seq{arg=Arg,body=Body}, value, sub_new_preserve_types(Sub));
+ E = case Ctxt of
+ effect ->
+ %% Throw away the literal body.
+ Arg;
+ value ->
+ %% Since the variable is not used in the body, we
+ %% can rewrite the let to a sequence.
+ %% let <Var> = Arg in Literal ==> seq Arg Literal
+ #c_seq{arg=Arg,body=Body}
+ end,
+ expr(E, Ctxt, sub_new_preserve_types(Sub));
{Vs,Arg,Body} ->
- opt_case_in_let_arg(
- opt_case_in_let(Let#c_let{vars=Vs,arg=Arg,body=Body}, Sub),
- value, Sub)
+ %% If none of the variables are used in the body, we can
+ %% rewrite the let to a sequence:
+ %% let <Var> = Arg in BodyWithoutVar ==>
+ %% seq Arg BodyWithoutVar
+ case is_any_var_used(Vs, Body) of
+ false ->
+ expr(#c_seq{arg=Arg,body=Body}, Ctxt,
+ sub_new_preserve_types(Sub));
+ true ->
+ Let1 = Let0#c_let{vars=Vs,arg=Arg,body=Body},
+ Let2 = opt_case_in_let(Let1, Sub),
+ opt_case_in_let_arg(Let2, Ctxt, Sub)
+ end
end.
move_case_into_arg(#c_case{arg=#c_let{vars=OuterVars0,arg=OuterArg,
@@ -2754,12 +2435,61 @@ is_any_var_used([#c_var{name=V}|Vs], Expr) ->
end;
is_any_var_used([], _) -> false.
-is_boolean_type(V, #sub{t=Tdb}) ->
+%%%
+%%% Retrieving information about types.
+%%%
+
+-spec get_type(cerl:cerl(), #sub{}) -> type_info() | 'none'.
+
+get_type(#c_var{name=V}, #sub{t=Tdb}) ->
case orddict:find(V, Tdb) of
- {ok,bool} -> true;
- _ -> false
+ {ok,Type} -> Type;
+ error -> none
+ end;
+get_type(C, _) ->
+ case cerl:type(C) of
+ binary -> C;
+ map -> C;
+ _ ->
+ case cerl:is_data(C) of
+ true -> C;
+ false -> none
+ end
+ end.
+
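+%% For example (a sketch of the intended results):
+%%
+%%     is_boolean_type(#c_literal{val=true}, Sub)  -> yes
+%%     is_boolean_type(#c_literal{val=42}, Sub)    -> no
+%%     is_boolean_type(UnknownVar, Sub)            -> maybe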
+-spec is_boolean_type(cerl:cerl(), sub()) -> yes_no_maybe().
+
+is_boolean_type(Var, Sub) ->
+ case get_type(Var, Sub) of
+ none ->
+ maybe;
+ bool ->
+ yes;
+ C ->
+ B = cerl:is_c_atom(C) andalso
+ is_boolean(cerl:atom_val(C)),
+ yes_no(B)
+ end.
+
+-spec is_int_type(cerl:cerl(), sub()) -> yes_no_maybe().
+
+is_int_type(Var, Sub) ->
+ case get_type(Var, Sub) of
+ none -> maybe;
+ C -> yes_no(cerl:is_c_int(C))
+ end.
+
+-spec is_tuple_type(cerl:cerl(), sub()) -> yes_no_maybe().
+
+is_tuple_type(Var, Sub) ->
+ case get_type(Var, Sub) of
+ none -> maybe;
+ C -> yes_no(cerl:is_c_tuple(C))
end.
+yes_no(true) -> yes;
+yes_no(false) -> no.
+
%% update_types(Expr, Pattern, Sub) -> Sub'
%% Update the type database.
update_types(Expr, Pat, #sub{t=Tdb0}=Sub) ->
@@ -3081,11 +2811,11 @@ add_bin_opt_info(Core, Term) ->
end.
add_warning(Core, Term) ->
- case is_compiler_generated(Core) of
+ case suppress_warning(Core) of
true ->
ok;
false ->
- Anno = core_lib:get_anno(Core),
+ Anno = cerl:get_ann(Core),
Line = get_line(Anno),
File = get_file(Anno),
Key = {?MODULE,warnings},
@@ -3106,9 +2836,17 @@ get_file([{file,File}|_]) -> File;
get_file([_|T]) -> get_file(T);
get_file([]) -> "no_file". % should not happen
+suppress_warning(Core) ->
+ is_compiler_generated(Core) orelse
+ is_result_unwanted(Core).
+
is_compiler_generated(Core) ->
- Anno = core_lib:get_anno(Core),
- member(compiler_generated, Anno).
+ Ann = cerl:get_ann(Core),
+ member(compiler_generated, Ann).
+
+is_result_unwanted(Core) ->
+ Ann = cerl:get_ann(Core),
+ member(result_not_wanted, Ann).
get_warnings() ->
ordsets:from_list((erase({?MODULE,warnings}))).
diff --git a/lib/compiler/src/sys_core_fold_lists.erl b/lib/compiler/src/sys_core_fold_lists.erl
new file mode 100644
index 0000000000..49dc59052a
--- /dev/null
+++ b/lib/compiler/src/sys_core_fold_lists.erl
@@ -0,0 +1,386 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%% Purpose : Inline higher-order list functions from the lists module.
+
+-module(sys_core_fold_lists).
+
+-export([call/4]).
+
+-include("core_parse.hrl").
+
+%% We inline some very common higher-order list operations.
+%% We use the same evaluation order as the library function.
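+%%
+%% Roughly (a source-level sketch), a call such as lists:map(F, L)
+%% is rewritten in place into a local recursive function:
+%%
+%%     letrec 'lists^map'/1 =
+%%         fun ([X|Xs]) -> [F(X)|'lists^map'(Xs)];
+%%             ([]) when is_function(F, 1) -> [];
+%%             (Bad) -> error(function_clause)
+%%         end
+%%     in 'lists^map'(L)
+%%
+%% (Core Erlang letrec shown with Erlang clause syntax for brevity.)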
+
+-spec call(cerl:c_call(), atom(), atom(), [cerl:cerl()]) ->
+ 'none' | cerl:cerl().
+
+call(#c_call{anno=Anno}, lists, all, [Arg1,Arg2]) ->
+ Loop = #c_var{name={'lists^all',1}},
+ F = #c_var{name='F'},
+ Xs = #c_var{name='Xs'},
+ X = #c_var{name='X'},
+ Err1 = #c_tuple{es=[#c_literal{val='case_clause'}, X]},
+ CC1 = #c_clause{pats=[#c_literal{val=true}], guard=#c_literal{val=true},
+ body=#c_apply{anno=Anno, op=Loop, args=[Xs]}},
+ CC2 = #c_clause{pats=[#c_literal{val=false}], guard=#c_literal{val=true},
+ body=#c_literal{val=false}},
+ CC3 = #c_clause{pats=[X], guard=#c_literal{val=true},
+ body=match_fail(Anno, Err1)},
+ C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
+ body=#c_case{arg=#c_apply{anno=Anno, op=F, args=[X]},
+ clauses = [CC1, CC2, CC3]}},
+ C2 = #c_clause{pats=[#c_literal{val=[]}],
+ guard=#c_call{module=#c_literal{val=erlang},
+ name=#c_literal{val=is_function},
+ args=[F, #c_literal{val=1}]},
+ body=#c_literal{val=true}},
+ Err2 = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]},
+ C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
+ body=match_fail([{function_name,{'lists^all',1}}|Anno], Err2)},
+ Fun = #c_fun{vars=[Xs],
+ body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
+ L = #c_var{name='L'},
+ #c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]},
+ body=#c_letrec{defs=[{Loop,Fun}],
+ body=#c_apply{anno=Anno, op=Loop, args=[L]}}};
+call(#c_call{anno=Anno}, lists, any, [Arg1,Arg2]) ->
+ Loop = #c_var{name={'lists^any',1}},
+ F = #c_var{name='F'},
+ Xs = #c_var{name='Xs'},
+ X = #c_var{name='X'},
+ Err1 = #c_tuple{es=[#c_literal{val='case_clause'}, X]},
+ CC1 = #c_clause{pats=[#c_literal{val=true}], guard=#c_literal{val=true},
+ body=#c_literal{val=true}},
+ CC2 = #c_clause{pats=[#c_literal{val=false}], guard=#c_literal{val=true},
+ body=#c_apply{anno=Anno, op=Loop, args=[Xs]}},
+ CC3 = #c_clause{pats=[X], guard=#c_literal{val=true},
+ body=match_fail(Anno, Err1)},
+ C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
+ body=#c_case{arg=#c_apply{anno=Anno, op=F, args=[X]},
+ clauses = [CC1, CC2, CC3]}},
+ C2 = #c_clause{pats=[#c_literal{val=[]}],
+ guard=#c_call{module=#c_literal{val=erlang},
+ name=#c_literal{val=is_function},
+ args=[F, #c_literal{val=1}]},
+ body=#c_literal{val=false}},
+ Err2 = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]},
+ C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
+ body=match_fail([{function_name,{'lists^any',1}}|Anno], Err2)},
+ Fun = #c_fun{vars=[Xs],
+ body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
+ L = #c_var{name='L'},
+ #c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]},
+ body=#c_letrec{defs=[{Loop,Fun}],
+ body=#c_apply{anno=Anno, op=Loop, args=[L]}}};
+call(#c_call{anno=Anno}, lists, foreach, [Arg1,Arg2]) ->
+ Loop = #c_var{name={'lists^foreach',1}},
+ F = #c_var{name='F'},
+ Xs = #c_var{name='Xs'},
+ X = #c_var{name='X'},
+ C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
+ body=#c_seq{arg=#c_apply{anno=Anno, op=F, args=[X]},
+ body=#c_apply{anno=Anno, op=Loop, args=[Xs]}}},
+ C2 = #c_clause{pats=[#c_literal{val=[]}],
+ guard=#c_call{module=#c_literal{val=erlang},
+ name=#c_literal{val=is_function},
+ args=[F, #c_literal{val=1}]},
+ body=#c_literal{val=ok}},
+ Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]},
+ C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
+ body=match_fail([{function_name,{'lists^foreach',1}}|Anno], Err)},
+ Fun = #c_fun{vars=[Xs],
+ body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
+ L = #c_var{name='L'},
+ #c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]},
+ body=#c_letrec{defs=[{Loop,Fun}],
+ body=#c_apply{anno=Anno, op=Loop, args=[L]}}};
+call(#c_call{anno=Anno}, lists, map, [Arg1,Arg2]) ->
+ Loop = #c_var{name={'lists^map',1}},
+ F = #c_var{name='F'},
+ Xs = #c_var{name='Xs'},
+ X = #c_var{name='X'},
+ H = #c_var{name='H'},
+ C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
+ body=#c_let{vars=[H], arg=#c_apply{anno=Anno,
+ op=F,
+ args=[X]},
+ body=#c_cons{hd=H,
+ anno=[compiler_generated],
+ tl=#c_apply{anno=Anno,
+ op=Loop,
+ args=[Xs]}}}},
+ C2 = #c_clause{pats=[#c_literal{val=[]}],
+ guard=#c_call{module=#c_literal{val=erlang},
+ name=#c_literal{val=is_function},
+ args=[F, #c_literal{val=1}]},
+ body=#c_literal{val=[]}},
+ Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]},
+ C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
+ body=match_fail([{function_name,{'lists^map',1}}|Anno], Err)},
+ Fun = #c_fun{vars=[Xs],
+ body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
+ L = #c_var{name='L'},
+ #c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]},
+ body=#c_letrec{defs=[{Loop,Fun}],
+ body=#c_apply{anno=Anno, op=Loop, args=[L]}}};
+call(#c_call{anno=Anno}, lists, flatmap, [Arg1,Arg2]) ->
+ Loop = #c_var{name={'lists^flatmap',1}},
+ F = #c_var{name='F'},
+ Xs = #c_var{name='Xs'},
+ X = #c_var{name='X'},
+ H = #c_var{name='H'},
+ C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
+ body=#c_let{vars=[H],
+ arg=#c_apply{anno=Anno, op=F, args=[X]},
+ body=#c_call{anno=[compiler_generated|Anno],
+ module=#c_literal{val=erlang},
+ name=#c_literal{val='++'},
+ args=[H,
+ #c_apply{anno=Anno,
+ op=Loop,
+ args=[Xs]}]}}},
+ C2 = #c_clause{pats=[#c_literal{val=[]}],
+ guard=#c_call{module=#c_literal{val=erlang},
+ name=#c_literal{val=is_function},
+ args=[F, #c_literal{val=1}]},
+ body=#c_literal{val=[]}},
+ Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]},
+ C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
+ body=match_fail([{function_name,{'lists^flatmap',1}}|Anno], Err)},
+ Fun = #c_fun{vars=[Xs],
+ body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
+ L = #c_var{name='L'},
+ #c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]},
+ body=#c_letrec{defs=[{Loop,Fun}],
+ body=#c_apply{anno=Anno, op=Loop, args=[L]}}};
+call(#c_call{anno=Anno}, lists, filter, [Arg1,Arg2]) ->
+ Loop = #c_var{name={'lists^filter',1}},
+ F = #c_var{name='F'},
+ Xs = #c_var{name='Xs'},
+ X = #c_var{name='X'},
+ B = #c_var{name='B'},
+ Err1 = #c_tuple{es=[#c_literal{val='case_clause'}, X]},
+ CC1 = #c_clause{pats=[#c_literal{val=true}], guard=#c_literal{val=true},
+ body=#c_cons{anno=[compiler_generated], hd=X, tl=Xs}},
+ CC2 = #c_clause{pats=[#c_literal{val=false}], guard=#c_literal{val=true},
+ body=Xs},
+ CC3 = #c_clause{pats=[X], guard=#c_literal{val=true},
+ body=match_fail(Anno, Err1)},
+ Case = #c_case{arg=B, clauses = [CC1, CC2, CC3]},
+ C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
+ body=#c_let{vars=[B],
+ arg=#c_apply{anno=Anno, op=F, args=[X]},
+ body=#c_let{vars=[Xs],
+ arg=#c_apply{anno=Anno,
+ op=Loop,
+ args=[Xs]},
+ body=Case}}},
+ C2 = #c_clause{pats=[#c_literal{val=[]}],
+ guard=#c_call{module=#c_literal{val=erlang},
+ name=#c_literal{val=is_function},
+ args=[F, #c_literal{val=1}]},
+ body=#c_literal{val=[]}},
+ Err2 = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]},
+ C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
+ body=match_fail([{function_name,{'lists^filter',1}}|Anno], Err2)},
+ Fun = #c_fun{vars=[Xs],
+ body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
+ L = #c_var{name='L'},
+ #c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]},
+ body=#c_letrec{defs=[{Loop,Fun}],
+ body=#c_apply{anno=Anno, op=Loop, args=[L]}}};
+call(#c_call{anno=Anno}, lists, foldl, [Arg1,Arg2,Arg3]) ->
+ Loop = #c_var{name={'lists^foldl',2}},
+ F = #c_var{name='F'},
+ Xs = #c_var{name='Xs'},
+ X = #c_var{name='X'},
+ A = #c_var{name='A'},
+ C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
+ body=#c_apply{anno=Anno,
+ op=Loop,
+ args=[Xs, #c_apply{anno=Anno,
+ op=F,
+ args=[X, A]}]}},
+ C2 = #c_clause{pats=[#c_literal{val=[]}],
+ guard=#c_call{module=#c_literal{val=erlang},
+ name=#c_literal{val=is_function},
+ args=[F, #c_literal{val=2}]},
+ body=A},
+ Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, A, Xs]},
+ C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
+ body=match_fail([{function_name,{'lists^foldl',2}}|Anno], Err)},
+ Fun = #c_fun{vars=[Xs, A],
+ body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
+ L = #c_var{name='L'},
+ #c_let{vars=[F, A, L], arg=#c_values{es=[Arg1, Arg2, Arg3]},
+ body=#c_letrec{defs=[{Loop,Fun}],
+ body=#c_apply{anno=Anno, op=Loop, args=[L, A]}}};
+call(#c_call{anno=Anno}, lists, foldr, [Arg1,Arg2,Arg3]) ->
+ Loop = #c_var{name={'lists^foldr',2}},
+ F = #c_var{name='F'},
+ Xs = #c_var{name='Xs'},
+ X = #c_var{name='X'},
+ A = #c_var{name='A'},
+ C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
+ body=#c_apply{anno=Anno,
+ op=F,
+ args=[X, #c_apply{anno=Anno,
+ op=Loop,
+ args=[Xs, A]}]}},
+ C2 = #c_clause{pats=[#c_literal{val=[]}],
+ guard=#c_call{module=#c_literal{val=erlang},
+ name=#c_literal{val=is_function},
+ args=[F, #c_literal{val=2}]},
+ body=A},
+ Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, A, Xs]},
+ C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
+ body=match_fail([{function_name,{'lists^foldr',2}}|Anno], Err)},
+ Fun = #c_fun{vars=[Xs, A],
+ body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
+ L = #c_var{name='L'},
+ #c_let{vars=[F, A, L], arg=#c_values{es=[Arg1, Arg2, Arg3]},
+ body=#c_letrec{defs=[{Loop,Fun}],
+ body=#c_apply{anno=Anno, op=Loop, args=[L, A]}}};
+call(#c_call{anno=Anno}, lists, mapfoldl, [Arg1,Arg2,Arg3]) ->
+ Loop = #c_var{name={'lists^mapfoldl',2}},
+ F = #c_var{name='F'},
+ Xs = #c_var{name='Xs'},
+ X = #c_var{name='X'},
+ Avar = #c_var{name='A'},
+ Match =
+ fun (A, P, E) ->
+ C1 = #c_clause{pats=[P], guard=#c_literal{val=true}, body=E},
+ Err = #c_tuple{es=[#c_literal{val='badmatch'}, X]},
+ C2 = #c_clause{pats=[X], guard=#c_literal{val=true},
+ body=match_fail(Anno, Err)},
+ #c_case{arg=A, clauses=[C1, C2]}
+ end,
+ C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
+ body=Match(#c_apply{anno=Anno, op=F, args=[X, Avar]},
+ #c_tuple{es=[X, Avar]},
+%%% Tuple passing version
+ Match(#c_apply{anno=Anno,
+ op=Loop,
+ args=[Xs, Avar]},
+ #c_tuple{es=[Xs, Avar]},
+ #c_tuple{anno=[compiler_generated],
+ es=[#c_cons{anno=[compiler_generated],
+ hd=X, tl=Xs},
+ Avar]})
+%%% Multiple-value version
+%%% #c_let{vars=[Xs,A],
+%%% %% The tuple here will be optimised
+%%% %% away later; no worries.
+%%% arg=#c_apply{op=Loop, args=[Xs, A]},
+%%% body=#c_values{es=[#c_cons{hd=X, tl=Xs},
+%%% A]}}
+ )},
+ C2 = #c_clause{pats=[#c_literal{val=[]}],
+ guard=#c_call{module=#c_literal{val=erlang},
+ name=#c_literal{val=is_function},
+ args=[F, #c_literal{val=2}]},
+%%% Tuple passing version
+ body=#c_tuple{anno=[compiler_generated],
+ es=[#c_literal{val=[]}, Avar]}},
+%%% Multiple-value version
+%%% body=#c_values{es=[#c_literal{val=[]}, A]}},
+ Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Avar, Xs]},
+ C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
+ body=match_fail([{function_name,{'lists^mapfoldl',2}}|Anno], Err)},
+ Fun = #c_fun{vars=[Xs, Avar],
+ body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
+ L = #c_var{name='L'},
+ #c_let{vars=[F, Avar, L], arg=#c_values{es=[Arg1, Arg2, Arg3]},
+ body=#c_letrec{defs=[{Loop,Fun}],
+%%% Tuple passing version
+ body=#c_apply{anno=Anno,
+ op=Loop,
+ args=[L, Avar]}}};
+%%% Multiple-value version
+%%% body=#c_let{vars=[Xs, A],
+%%% arg=#c_apply{op=Loop,
+%%% args=[L, A]},
+%%% body=#c_tuple{es=[Xs, A]}}}};
+call(#c_call{anno=Anno}, lists, mapfoldr, [Arg1,Arg2,Arg3]) ->
+ Loop = #c_var{name={'lists^mapfoldr',2}},
+ F = #c_var{name='F'},
+ Xs = #c_var{name='Xs'},
+ X = #c_var{name='X'},
+ Avar = #c_var{name='A'},
+ Match =
+ fun (A, P, E) ->
+ C1 = #c_clause{pats=[P], guard=#c_literal{val=true}, body=E},
+ Err = #c_tuple{es=[#c_literal{val='badmatch'}, X]},
+ C2 = #c_clause{pats=[X], guard=#c_literal{val=true},
+ body=match_fail(Anno, Err)},
+ #c_case{arg=A, clauses=[C1, C2]}
+ end,
+ C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
+%%% Tuple passing version
+ body=Match(#c_apply{anno=Anno,
+ op=Loop,
+ args=[Xs, Avar]},
+ #c_tuple{es=[Xs, Avar]},
+ Match(#c_apply{anno=Anno, op=F, args=[X, Avar]},
+ #c_tuple{es=[X, Avar]},
+ #c_tuple{anno=[compiler_generated],
+ es=[#c_cons{anno=[compiler_generated],
+ hd=X, tl=Xs}, Avar]}))
+%%% Multiple-value version
+%%% body=#c_let{vars=[Xs,A],
+%%% %% The tuple will be optimised away
+%%% arg=#c_apply{op=Loop, args=[Xs, A]},
+%%% body=Match(#c_apply{op=F, args=[X, A]},
+%%% #c_tuple{es=[X, A]},
+%%% #c_values{es=[#c_cons{hd=X, tl=Xs},
+%%% A]})}
+ },
+ C2 = #c_clause{pats=[#c_literal{val=[]}],
+ guard=#c_call{module=#c_literal{val=erlang},
+ name=#c_literal{val=is_function},
+ args=[F, #c_literal{val=2}]},
+%%% Tuple passing version
+ body=#c_tuple{anno=[compiler_generated],
+ es=[#c_literal{val=[]}, Avar]}},
+%%% Multiple-value version
+%%% body=#c_values{es=[#c_literal{val=[]}, A]}},
+ Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Avar, Xs]},
+ C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
+ body=match_fail([{function_name,{'lists^mapfoldr',2}}|Anno], Err)},
+ Fun = #c_fun{vars=[Xs, Avar],
+ body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
+ L = #c_var{name='L'},
+ #c_let{vars=[F, Avar, L], arg=#c_values{es=[Arg1, Arg2, Arg3]},
+ body=#c_letrec{defs=[{Loop,Fun}],
+%%% Tuple passing version
+ body=#c_apply{anno=Anno,
+ op=Loop,
+ args=[L, Avar]}}};
+%%% Multiple-value version
+%%% body=#c_let{vars=[Xs, A],
+%%% arg=#c_apply{op=Loop,
+%%% args=[L, A]},
+%%% body=#c_tuple{es=[Xs, A]}}}};
+call(_, _, _, _) ->
+ none.
+
+match_fail(Ann, Arg) ->
+ Name = cerl:abstract(match_fail),
+ Args = [Arg],
+ cerl:ann_c_primop(Ann, Name, Args).
diff --git a/lib/compiler/src/sys_core_inline.erl b/lib/compiler/src/sys_core_inline.erl
index 9f93acb666..1e3a735e9b 100644
--- a/lib/compiler/src/sys_core_inline.erl
+++ b/lib/compiler/src/sys_core_inline.erl
@@ -195,10 +195,10 @@ kill_id_anns(Body) ->
A = kill_id_anns_1(A0),
CFun#c_fun{anno=A};
(Expr) ->
- %% Mark everything as compiler generated to suppress
- %% bogus warnings.
- A = compiler_generated(core_lib:get_anno(Expr)),
- core_lib:set_anno(Expr, A)
+ %% Mark everything as compiler generated to
+ %% suppress bogus warnings.
+ A = compiler_generated(cerl:get_ann(Expr)),
+ cerl:set_ann(Expr, A)
end, Body).
kill_id_anns_1([{'id',_}|As]) ->
diff --git a/lib/compiler/src/v3_core.erl b/lib/compiler/src/v3_core.erl
index 612660c2d6..9dd6b319a3 100644
--- a/lib/compiler/src/v3_core.erl
+++ b/lib/compiler/src/v3_core.erl
@@ -66,6 +66,7 @@
%% match arguments are novars
%% case arguments are novars
%% receive timeouts are novars
+%% binaries and maps are novars
%% let/set arguments are expressions
%% fun is not a safe
@@ -105,7 +106,9 @@
-record(iset, {anno=#a{},var,arg}).
-record(itry, {anno=#a{},args,vars,body,evars,handler}).
-record(ifilter, {anno=#a{},arg}).
--record(igen, {anno=#a{},acc_pat,acc_guard,skip_pat,tail,tail_pat,arg}).
+-record(igen, {anno=#a{},ceps=[],acc_pat,acc_guard,
+ skip_pat,tail,tail_pat,arg}).
+-record(isimple, {anno=#a{},term :: cerl:cerl()}).
-type iapply() :: #iapply{}.
-type ibinary() :: #ibinary{}.
@@ -124,11 +127,12 @@
-type itry() :: #itry{}.
-type ifilter() :: #ifilter{}.
-type igen() :: #igen{}.
+-type isimple() :: #isimple{}.
-type i() :: iapply() | ibinary() | icall() | icase() | icatch()
| iclause() | ifun() | iletrec() | imatch() | iprimop()
| iprotect() | ireceive1() | ireceive2() | iset() | itry()
- | ifilter() | igen().
+ | ifilter() | igen() | isimple().
-type warning() :: {file:filename(), [{integer(), module(), term()}]}.
@@ -287,13 +291,15 @@ gexpr({protect,Line,Arg}, Bools0, St0) ->
{#iprotect{anno=#a{anno=Anno},body=Eps++[E]},[],Bools0,St}
end;
gexpr({op,L,'andalso',E1,E2}, Bools, St0) ->
- {#c_var{name=V0},St} = new_var(L, St0),
+ Anno = lineno_anno(L, St0),
+ {#c_var{name=V0},St} = new_var(Anno, St0),
V = {var,L,V0},
False = {atom,L,false},
E = make_bool_switch_guard(L, E1, V, E2, False),
gexpr(E, Bools, St);
gexpr({op,L,'orelse',E1,E2}, Bools, St0) ->
- {#c_var{name=V0},St} = new_var(L, St0),
+ Anno = lineno_anno(L, St0),
+ {#c_var{name=V0},St} = new_var(Anno, St0),
V = {var,L,V0},
True = {atom,L,true},
E = make_bool_switch_guard(L, E1, V, True, E2),
@@ -382,33 +388,30 @@ gexpr_test(E0, Bools0, St0) ->
Lanno = Anno#a.anno,
{New,St2} = new_var(Lanno, St1),
Bools = [New|Bools0],
- {#icall{anno=Anno, %Must have an #a{}
- module=#c_literal{anno=Lanno,val=erlang},
- name=#c_literal{anno=Lanno,val='=:='},
- args=[New,#c_literal{anno=Lanno,val=true}]},
+ {icall_eq_true(New),
Eps0 ++ [#iset{anno=Anno,var=New,arg=E1}],Bools,St2}
end;
_ ->
- Anno = get_ianno(E1),
Lanno = get_lineno_anno(E1),
+ ACompGen = #a{anno=[compiler_generated]},
case is_simple(E1) of
true ->
Bools = [E1|Bools0],
- {#icall{anno=Anno, %Must have an #a{}
- module=#c_literal{anno=Lanno,val=erlang},
- name=#c_literal{anno=Lanno,val='=:='},
- args=[E1,#c_literal{anno=Lanno,val=true}]},Eps0,Bools,St1};
+ {icall_eq_true(E1),Eps0,Bools,St1};
false ->
{New,St2} = new_var(Lanno, St1),
Bools = [New|Bools0],
- {#icall{anno=Anno, %Must have an #a{}
- module=#c_literal{anno=Lanno,val=erlang},
- name=#c_literal{anno=Lanno,val='=:='},
- args=[New,#c_literal{anno=Lanno,val=true}]},
- Eps0 ++ [#iset{anno=Anno,var=New,arg=E1}],Bools,St2}
+ {icall_eq_true(New),
+ Eps0 ++ [#iset{anno=ACompGen,var=New,arg=E1}],Bools,St2}
end
end.
+icall_eq_true(Arg) ->
+ #icall{anno=#a{anno=[compiler_generated]},
+ module=#c_literal{val=erlang},
+ name=#c_literal{val='=:='},
+ args=[Arg,#c_literal{val=true}]}.
+
force_booleans(Vs0, E, Eps, St) ->
Vs1 = [set_anno(V, []) || V <- Vs0],
Vs = unforce(E, Eps, Vs1),
@@ -418,16 +421,15 @@ force_booleans_1([], E, Eps, St) ->
{E,Eps,St};
force_booleans_1([V|Vs], E0, Eps0, St0) ->
{E1,Eps1,St1} = force_safe(E0, St0),
- Lanno = element(2, V),
- Anno = #a{anno=Lanno},
- Call = #icall{anno=Anno,module=#c_literal{anno=Lanno,val=erlang},
- name=#c_literal{anno=Lanno,val=is_boolean},
+ ACompGen = #a{anno=[compiler_generated]},
+ Call = #icall{anno=ACompGen,module=#c_literal{val=erlang},
+ name=#c_literal{val=is_boolean},
args=[V]},
- {New,St} = new_var(Lanno, St1),
- Iset = #iset{anno=Anno,var=New,arg=Call},
+ {New,St} = new_var([], St1),
+ Iset = #iset{var=New,arg=Call},
Eps = Eps0 ++ Eps1 ++ [Iset],
- E = #icall{anno=Anno,
- module=#c_literal{anno=Lanno,val=erlang},name=#c_literal{anno=Lanno,val='and'},
+ E = #icall{anno=ACompGen,
+ module=#c_literal{val=erlang},name=#c_literal{val='and'},
args=[E1,New]},
force_booleans_1(Vs, E, Eps, St).
@@ -514,28 +516,28 @@ exprs([], St) -> {[],St}.
%% Generate an internal core expression.
expr({var,L,V}, St) -> {#c_var{anno=lineno_anno(L, St),name=V},[],St};
-expr({char,L,C}, St) -> {#c_literal{anno=lineno_anno(L, St),val=C},[],St};
-expr({integer,L,I}, St) -> {#c_literal{anno=lineno_anno(L, St),val=I},[],St};
-expr({float,L,F}, St) -> {#c_literal{anno=lineno_anno(L, St),val=F},[],St};
-expr({atom,L,A}, St) -> {#c_literal{anno=lineno_anno(L, St),val=A},[],St};
-expr({nil,L}, St) -> {#c_literal{anno=lineno_anno(L, St),val=[]},[],St};
-expr({string,L,S}, St) -> {#c_literal{anno=lineno_anno(L, St),val=S},[],St};
+expr({char,L,C}, St) -> {#c_literal{anno=full_anno(L, St),val=C},[],St};
+expr({integer,L,I}, St) -> {#c_literal{anno=full_anno(L, St),val=I},[],St};
+expr({float,L,F}, St) -> {#c_literal{anno=full_anno(L, St),val=F},[],St};
+expr({atom,L,A}, St) -> {#c_literal{anno=full_anno(L, St),val=A},[],St};
+expr({nil,L}, St) -> {#c_literal{anno=full_anno(L, St),val=[]},[],St};
+expr({string,L,S}, St) -> {#c_literal{anno=full_anno(L, St),val=S},[],St};
expr({cons,L,H0,T0}, St0) ->
{H1,Hps,St1} = safe(H0, St0),
{T1,Tps,St2} = safe(T0, St1),
- A = lineno_anno(L, St2),
+ A = full_anno(L, St2),
{annotate_cons(A, H1, T1, St2),Hps ++ Tps,St2};
expr({lc,L,E,Qs0}, St0) ->
{Qs1,St1} = preprocess_quals(L, Qs0, St0),
lc_tq(L, E, Qs1, #c_literal{anno=lineno_anno(L, St1),val=[]}, St1);
expr({bc,L,E,Qs}, St) ->
- bc_tq(L, E, Qs, {nil,L}, St);
+ bc_tq(L, E, Qs, St);
expr({tuple,L,Es0}, St0) ->
{Es1,Eps,St1} = safe_list(Es0, St0),
A = record_anno(L, St1),
{annotate_tuple(A, Es1, St1),Eps,St1};
expr({map,L,Es0}, St0) ->
- map_build_pair_chain(#c_literal{val=#{}},Es0,lineno_anno(L,St0),St0);
+ map_build_pairs(#c_literal{val=#{}}, Es0, full_anno(L, St0), St0);
expr({map,L,M0,Es0}, St0) ->
try expr_map(M0,Es0,lineno_anno(L, St0),St0) of
{_,_,_}=Res -> Res
@@ -550,7 +552,7 @@ expr({map,L,M0,Es0}, St0) ->
args=As},[],St}
end;
expr({bin,L,Es0}, St0) ->
- try expr_bin(Es0, lineno_anno(L, St0), St0) of
+ try expr_bin(Es0, full_anno(L, St0), St0) of
{_,_,_}=Res -> Res
catch
throw:bad_binary ->
@@ -640,11 +642,11 @@ expr({'catch',L,E0}, St0) ->
Lanno = lineno_anno(L, St1),
{#icatch{anno=#a{anno=Lanno},body=Eps ++ [E1]},[],St1};
expr({'fun',L,{function,F,A},{_,_,_}=Id}, St) ->
- Lanno = lineno_anno(L, St),
+ Lanno = full_anno(L, St),
{#c_var{anno=Lanno++[{id,Id}],name={F,A}},[],St};
expr({'fun',L,{function,M,F,A}}, St0) ->
{As,Aps,St1} = safe_list([M,F,A], St0),
- Lanno = lineno_anno(L, St1),
+ Lanno = full_anno(L, St1),
{#icall{anno=#a{anno=Lanno},
module=#c_literal{val=erlang},
name=#c_literal{val=make_fun},
@@ -655,13 +657,9 @@ expr({named_fun,L,'_',Cs,Id}, St) ->
fun_tq(Id, Cs, L, St, unnamed);
expr({named_fun,L,Name,Cs,Id}, St) ->
fun_tq(Id, Cs, L, St, {named,Name});
-expr({call,L,{remote,_,M,F},As0}, #core{wanted=Wanted}=St0) ->
+expr({call,L,{remote,_,M,F},As0}, St0) ->
{[M1,F1|As1],Aps,St1} = safe_list([M,F|As0], St0),
- Lanno = lineno_anno(L, St1),
- Anno = case Wanted of
- false -> [result_not_wanted|Lanno];
- true -> Lanno
- end,
+ Anno = full_anno(L, St1),
{#icall{anno=#a{anno=Anno},module=M1,name=F1,args=As1},Aps,St1};
expr({call,Lc,{atom,Lf,F},As0}, St0) ->
{As1,Aps,St1} = safe_list(As0, St0),
@@ -710,26 +708,28 @@ expr({op,_,'++',{lc,Llc,E,Qs0},More}, St0) ->
{Y,Yps,St} = lc_tq(Llc, E, Qs, Mc, St2),
{Y,Mps++Yps,St};
expr({op,L,'andalso',E1,E2}, St0) ->
- {#c_var{name=V0},St} = new_var(L, St0),
+ Anno = lineno_anno(L, St0),
+ {#c_var{name=V0},St} = new_var(Anno, St0),
V = {var,L,V0},
False = {atom,L,false},
E = make_bool_switch(L, E1, V, E2, False, St0),
expr(E, St);
expr({op,L,'orelse',E1,E2}, St0) ->
- {#c_var{name=V0},St} = new_var(L, St0),
+ Anno = lineno_anno(L, St0),
+ {#c_var{name=V0},St} = new_var(Anno, St0),
V = {var,L,V0},
True = {atom,L,true},
E = make_bool_switch(L, E1, V, True, E2, St0),
expr(E, St);
expr({op,L,Op,A0}, St0) ->
{A1,Aps,St1} = safe(A0, St0),
- LineAnno = lineno_anno(L, St1),
+ LineAnno = full_anno(L, St1),
{#icall{anno=#a{anno=LineAnno}, %Must have an #a{}
module=#c_literal{anno=LineAnno,val=erlang},
name=#c_literal{anno=LineAnno,val=Op},args=[A1]},Aps,St1};
expr({op,L,Op,L0,R0}, St0) ->
{As,Aps,St1} = safe_list([L0,R0], St0),
- LineAnno = lineno_anno(L, St1),
+ LineAnno = full_anno(L, St1),
{#icall{anno=#a{anno=LineAnno}, %Must have an #a{}
module=#c_literal{anno=LineAnno,val=erlang},
name=#c_literal{anno=LineAnno,val=Op},args=As},Aps,St1}.
@@ -778,66 +778,29 @@ expr_map(M0,Es0,A,St0) ->
Fc = fail_clause([Fpat], A, #c_literal{val=badarg}),
{#icase{anno=#a{anno=A},args=[M1],clauses=Cs,fc=Fc},Mps,St3};
{_,_} ->
- {M2,Eps,St2} = map_build_pair_chain(M1,Es0,A,St1),
+ {M2,Eps,St2} = map_build_pairs(M1, Es0, A, St1),
{M2,Mps++Eps,St2}
end;
false -> throw({bad_map,bad_map})
end.
-%% Group continuous literal blocks and single variables, i.e.
-%% M0#{ a := 1, b := V1, K1 := V2, K2 := 42}
-%% becomes equivalent to
-%% M1 = M0#{ a := 1, b := V1 },
-%% M2 = M1#{ K1 := V1 },
-%% M3 = M2#{ K2 := 42 }
-
-map_build_pair_chain(M,Es,A,St) ->
- %% hack, remove iset if only literal
- case map_build_pair_chain(M,Es,A,St,[]) of
- {_,[#iset{arg=#c_literal{}=Val}],St1} -> {Val,[],St1};
- Normal -> Normal
- end.
-
-map_build_pair_chain(M0,[],_,St,Mps) ->
- {M0,Mps,St};
-map_build_pair_chain(M0,Es0,A,St0,Mps) ->
- % group continuous literal blocks
- % Anno = #a{anno=[compiler_generated]},
- % order is important, we need to reverse the literals
- case map_pair_block(Es0,[],[],St0) of
- {{CesL,EspL},{[],[]},Es1,St1} ->
- {MVar,St2} = new_var(St1),
- Pre = [#iset{var=MVar, arg=ann_c_map(A,M0,reverse(CesL))}],
- map_build_pair_chain(MVar,Es1,A,St2,Mps++EspL++Pre);
- {{[],[]},{CesV,EspV},Es1,St1} ->
- {MVar,St2} = new_var(St1),
- Pre = [#iset{var=MVar, arg=#c_map{arg=M0,es=CesV, anno=A}}],
- map_build_pair_chain(MVar,Es1,A,St2,Mps ++ EspV++Pre);
- {{CesL,EspL},{CesV,EspV},Es1,St1} ->
- {MVarL,St2} = new_var(St1),
- {MVarV,St3} = new_var(St2),
- Pre = [#iset{var=MVarL, arg=ann_c_map(A,M0,reverse(CesL))},
- #iset{var=MVarV, arg=#c_map{arg=MVarL,es=CesV,anno=A}}],
- map_build_pair_chain(MVarV,Es1,A,St3,Mps++EspL++EspV++Pre)
- end.
-
-map_pair_block([{Op,L,K0,V0}|Es],Ces,Esp,St0) ->
- {K,Ep0,St1} = safe(K0, St0),
- {V,Ep1,St2} = safe(V0, St1),
- A = lineno_anno(L, St2),
- Pair0 = map_op_to_c_map_pair(Op),
- Pair1 = Pair0#c_map_pair{anno=A,key=K,val=V},
- case cerl:is_literal(K) of
- true ->
- map_pair_block(Es,[Pair1|Ces],Ep0 ++ Ep1 ++ Esp,St2);
- false ->
- {{Ces,Esp},{[Pair1],Ep0++Ep1},Es,St2}
- end;
-map_pair_block([],Ces,Esp,St) ->
- {{Ces,Esp},{[],[]},[],St}.
+map_build_pairs(Map, Es0, Ann, St0) ->
+ {Es,Pre,St1} = map_build_pairs_1(Es0, St0),
+ {ann_c_map(Ann, Map, Es),Pre,St1}.
+
+map_build_pairs_1([{Op0,L,K0,V0}|Es], St0) ->
+ {K,Pre0,St1} = safe(K0, St0),
+ {V,Pre1,St2} = safe(V0, St1),
+ {Pairs,Pre2,St3} = map_build_pairs_1(Es, St2),
+ As = lineno_anno(L, St3),
+ Op = map_op(Op0),
+ Pair = cerl:ann_c_map_pair(As, Op, K, V),
+ {[Pair|Pairs],Pre0++Pre1++Pre2,St3};
+map_build_pairs_1([], St) ->
+ {[],[],St}.
-map_op_to_c_map_pair(map_field_assoc) -> #c_map_pair{op=#c_literal{val=assoc}};
-map_op_to_c_map_pair(map_field_exact) -> #c_map_pair{op=#c_literal{val=exact}}.
+map_op(map_field_assoc) -> #c_literal{val=assoc};
+map_op(map_field_exact) -> #c_literal{val=exact}.
is_valid_map_src(#c_literal{val = M}) when is_map(M) -> true;
is_valid_map_src(#c_var{}) -> true;
@@ -1001,7 +964,7 @@ fun_tq({_,_,Name}=Id, Cs0, L, St0, NameInfo) ->
{Cs1,Ceps,St1} = clauses(Cs0, St0),
{Args,St2} = new_vars(Arity, St1),
{Ps,St3} = new_vars(Arity, St2), %Need new variables here
- Anno = lineno_anno(L, St3),
+ Anno = full_anno(L, St3),
Fc = function_clause(Ps, Anno, {Name,Arity}),
Fun = #ifun{anno=#a{anno=Anno},
id=[{id,Id}], %We KNOW!
@@ -1011,7 +974,8 @@ fun_tq({_,_,Name}=Id, Cs0, L, St0, NameInfo) ->
%% lc_tq(Line, Exp, [Qualifier], Mc, State) -> {LetRec,[PreExp],State}.
%% This TQ from Simon PJ pp 127-138.
-lc_tq(Line, E, [#igen{anno=GAnno,acc_pat=AccPat,acc_guard=AccGuard,
+lc_tq(Line, E, [#igen{anno=GAnno,ceps=Ceps,
+ acc_pat=AccPat,acc_guard=AccGuard,
skip_pat=SkipPat,tail=Tail,tail_pat=TailPat,
arg={Pre,Arg}}|Qs], Mc, St0) ->
{Name,St1} = new_fun_name("lc", St0),
@@ -1046,7 +1010,7 @@ lc_tq(Line, E, [#igen{anno=GAnno,acc_pat=AccPat,acc_guard=AccGuard,
Fun = #ifun{anno=LAnno,id=[],vars=[Var],clauses=Cs,fc=Fc},
{#iletrec{anno=LAnno#a{anno=[list_comprehension|LA]},defs=[{{Name,1},Fun}],
body=Pre ++ [#iapply{anno=LAnno,op=F,args=[Arg]}]},
- [],St4};
+ Ceps,St4};
lc_tq(Line, E, [#ifilter{}=Filter|Qs], Mc, St) ->
filter_tq(Line, E, Filter, Mc, St, Qs, fun lc_tq/5);
lc_tq(Line, E0, [], Mc0, St0) ->
@@ -1060,7 +1024,7 @@ lc_tq(Line, E0, [], Mc0, St0) ->
%% This TQ from Gustafsson ERLANG'05.
%% More could be transformed before calling bc_tq.
-bc_tq(Line, Exp, Qs0, _, St0) ->
+bc_tq(Line, Exp, Qs0, St0) ->
{BinVar,St1} = new_var(St0),
{Sz,SzPre,St2} = bc_initial_size(Exp, Qs0, St1),
{Qs,St3} = preprocess_quals(Line, Qs0, St2),
@@ -1071,7 +1035,8 @@ bc_tq(Line, Exp, Qs0, _, St0) ->
args=[Sz]}}] ++ BcPre,
{E,Pre,St}.
-bc_tq1(Line, E, [#igen{anno=GAnno,acc_pat=AccPat,acc_guard=AccGuard,
+bc_tq1(Line, E, [#igen{anno=GAnno,ceps=Ceps,
+ acc_pat=AccPat,acc_guard=AccGuard,
skip_pat=SkipPat,tail=Tail,tail_pat=TailPat,
arg={Pre,Arg}}|Qs], Mc, St0) ->
{Name,St1} = new_fun_name("lbc", St0),
@@ -1109,7 +1074,7 @@ bc_tq1(Line, E, [#igen{anno=GAnno,acc_pat=AccPat,acc_guard=AccGuard,
Fun = #ifun{anno=LAnno,id=[],vars=Vars,clauses=Cs,fc=Fc},
{#iletrec{anno=LAnno#a{anno=[list_comprehension|LA]},defs=[{{Name,2},Fun}],
body=Pre ++ [#iapply{anno=LAnno,op=F,args=[Arg,Mc]}]},
- [],St4};
+ Ceps,St4};
bc_tq1(Line, E, [#ifilter{}=Filter|Qs], Mc, St) ->
filter_tq(Line, E, Filter, Mc, St, Qs, fun bc_tq1/5);
bc_tq1(_, {bin,Bl,Elements}, [], AccVar, St0) ->
@@ -1245,8 +1210,9 @@ generator(Line, {generate,Lg,P0,E}, Gs, St0) ->
ann_c_cons(LA, Skip, Tail)}
end,
{Ce,Pre,St4} = safe(E, St3),
- Gen = #igen{anno=#a{anno=GA},acc_pat=AccPat,acc_guard=Cg,skip_pat=SkipPat,
- tail=Tail,tail_pat=#c_literal{anno=LA,val=[]},arg={Ceps++Pre,Ce}},
+ Gen = #igen{anno=#a{anno=GA},ceps=Ceps,
+ acc_pat=AccPat,acc_guard=Cg,skip_pat=SkipPat,
+ tail=Tail,tail_pat=#c_literal{anno=LA,val=[]},arg={Pre,Ce}},
{Gen,St4};
generator(Line, {b_generate,Lg,P,E}, Gs, St0) ->
LA = lineno_anno(Line, St0),
@@ -1515,6 +1481,7 @@ force_novars(#iapply{}=App, St) -> {App,[],St};
force_novars(#icall{}=Call, St) -> {Call,[],St};
force_novars(#ifun{}=Fun, St) -> {Fun,[],St}; %These are novars too
force_novars(#ibinary{}=Bin, St) -> {Bin,[],St};
+force_novars(#c_map{}=Bin, St) -> {Bin,[],St};
force_novars(Ce, St) ->
force_safe(Ce, St).
@@ -1634,7 +1601,7 @@ pattern({tuple,L,Ps}, St) ->
{annotate_tuple(record_anno(L, St), Ps1, St),Eps,St1};
pattern({map,L,Pairs}, St0) ->
{Ps,Eps,St1} = pattern_map_pairs(Pairs, St0),
- {#c_map{anno=lineno_anno(L, St1), es=Ps},Eps,St1};
+ {#c_map{anno=lineno_anno(L, St1),es=Ps,is_pat=true},Eps,St1};
pattern({bin,L,Ps}, St) ->
%% We don't create a #ibinary record here, since there is
%% no need to hold any used/new annotations in a pattern.
@@ -1794,7 +1761,7 @@ new_var_name(#core{vcount=C}=St) ->
new_var(St) ->
new_var([], St).
-new_var(Anno, St0) ->
+new_var(Anno, St0) when is_list(Anno) ->
{New,St} = new_var_name(St0),
{#c_var{anno=Anno,name=New},St}.
@@ -2021,11 +1988,11 @@ uexpr(#ibinary{anno=A,segments=Ss}, _, St) ->
uexpr(#c_literal{}=Lit, _, St) ->
Anno = get_anno(Lit),
{set_anno(Lit, #a{us=[],anno=Anno}),St};
-uexpr(Lit, _, St) ->
- true = is_simple(Lit), %Sanity check!
- Vs = lit_vars(Lit),
- Anno = get_anno(Lit),
- {set_anno(Lit, #a{us=Vs,anno=Anno}),St}.
+uexpr(Simple, _, St) ->
+ true = is_simple(Simple), %Sanity check!
+ Vs = lit_vars(Simple),
+ Anno = get_anno(Simple),
+ {#isimple{anno=#a{us=Vs,anno=Anno},term=Simple},St}.
uexpr_list(Les0, Ks, St0) ->
mapfoldl(fun (Le, St) -> uexpr(Le, Ks, St) end, St0, Les0).
@@ -2202,7 +2169,8 @@ cguard(Gs, St0) ->
cexprs([#iset{var=#c_var{name=Name}=Var}=Iset], As, St) ->
%% Make return value explicit, and make Var true top level.
- cexprs([Iset,Var#c_var{anno=#a{us=[Name]}}], As, St);
+ Isimple = #isimple{anno=#a{us=[Name]},term=Var},
+ cexprs([Iset,Isimple], As, St);
cexprs([Le], As, St0) ->
{Ce,Es,Us,St1} = cexpr(Le, As, St0),
Exp = make_vars(As), %The export variables
@@ -2317,12 +2285,9 @@ cexpr(#c_literal{}=Lit, _As, St) ->
Anno = get_anno(Lit),
Vs = Anno#a.us,
{set_anno(Lit, Anno#a.anno),[],Vs,St};
-cexpr(Lit, _As, St) ->
- true = is_simple(Lit), %Sanity check!
- Anno = get_anno(Lit),
- Vs = Anno#a.us,
- %%Vs = lit_vars(Lit),
- {set_anno(Lit, Anno#a.anno),[],Vs,St}.
+cexpr(#isimple{anno=#a{us=Vs},term=Simple}, _As, St) ->
+ true = is_simple(Simple), %Sanity check!
+ {Simple,[],Vs,St}.
cfun(#ifun{anno=A,id=Id,vars=Args,clauses=Lcs,fc=Lfc}, _As, St0) ->
{Ccs,St1} = cclauses(Lcs, [], St0), %NEVER export!
@@ -2345,11 +2310,6 @@ lit_vars(#c_map_pair{key=K,val=V}, Vs) -> lit_vars(K, lit_vars(V, Vs));
lit_vars(#c_var{name=V}, Vs) -> add_element(V, Vs);
lit_vars(_, Vs) -> Vs. %These are atomic
-% lit_bin_vars(Segs, Vs) ->
-% foldl(fun (#c_bitstr{val=V,size=S}, Vs0) ->
-% lit_vars(V, lit_vars(S, Vs0))
-% end, Vs, Segs).
-
lit_list_vars(Ls) -> lit_list_vars(Ls, []).
lit_list_vars(Ls, Vs) ->
@@ -2368,16 +2328,21 @@ record_anno(L, St) when L >= ?REC_OFFSET ->
true ->
[record | lineno_anno(L - ?REC_OFFSET, St)];
false ->
- lineno_anno(L, St)
+ full_anno(L, St)
end;
record_anno(L, St) when L < -?REC_OFFSET ->
case member(dialyzer, St#core.opts) of
true ->
[record | lineno_anno(L + ?REC_OFFSET, St)];
false ->
- lineno_anno(L, St)
+ full_anno(L, St)
end;
record_anno(L, St) ->
+ full_anno(L, St).
+
+full_anno(L, #core{wanted=false}=St) ->
+ [result_not_wanted|lineno_anno(L, St)];
+full_anno(L, #core{wanted=true}=St) ->
lineno_anno(L, St).
lineno_anno(L, St) ->
diff --git a/lib/compiler/src/v3_kernel.erl b/lib/compiler/src/v3_kernel.erl
index 6504351c02..08e84efc1b 100644
--- a/lib/compiler/src/v3_kernel.erl
+++ b/lib/compiler/src/v3_kernel.erl
@@ -131,12 +131,12 @@ module(#c_module{anno=A,name=M,exports=Es,attrs=As,defs=Fs}, _Options) ->
{ok,#k_mdef{anno=A,name=M#c_literal.val,exports=Kes,attributes=Kas,
body=Kfs ++ St#kern.funs},lists:sort(St#kern.ws)}.
-attributes([{#c_literal{val=Name},Val}|As]) ->
+attributes([{#c_literal{val=Name},#c_literal{val=Val}}|As]) ->
case include_attribute(Name) of
false ->
attributes(As);
true ->
- [{Name,core_lib:literal_value(Val)}|attributes(As)]
+ [{Name,Val}|attributes(As)]
end;
attributes([]) -> [].
@@ -521,62 +521,76 @@ is_valid_map_src(#k_var{}) -> true;
is_valid_map_src(_) -> false.
map_split_pairs(A, Var, Ces, Sub, St0) ->
- %% two steps
- %% 1. force variables
- %% 2. remove multiples
- Pairs0 = [{Op,K,V} || #c_map_pair{op=#c_literal{val=Op},key=K,val=V} <- Ces],
+ %% 1. Force variables.
+ %% 2. Group adjacent pairs with literal keys.
+ %% 3. Within each such group, remove multiple assignments to the same key.
+ %% 4. Partition each group according to operator ('=>' and ':=').
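+    %%
+    %% For example (a sketch): M#{a=>1, b=>2, K=>3, c:=4} becomes three
+    %% chained updates: one assoc update for the literal keys a and b,
+    %% one for the variable key K, and one exact update for c. A
+    %% repeated literal key within a group keeps only its last value.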
+ Pairs0 = [{Op,K,V} ||
+ #c_map_pair{op=#c_literal{val=Op},key=K,val=V} <- Ces],
{Pairs,Esp,St1} = foldr(fun
({Op,K0,V0}, {Ops,Espi,Sti0}) when Op =:= assoc; Op =:= exact ->
{K,Eps1,Sti1} = atomic(K0, Sub, Sti0),
{V,Eps2,Sti2} = atomic(V0, Sub, Sti1),
{[{Op,K,V}|Ops],Eps1 ++ Eps2 ++ Espi,Sti2}
end, {[],[],St0}, Pairs0),
-
- case map_group_pairs(Pairs) of
- {Assoc,[]} ->
- Kes = [#k_map_pair{key=K,val=V}||{_,{assoc,K,V}} <- Assoc],
- {#k_map{anno=A,op=assoc,var=Var,es=Kes},Esp,St1};
- {[],Exact} ->
- Kes = [#k_map_pair{key=K,val=V}||{_,{exact,K,V}} <- Exact],
- {#k_map{anno=A,op=exact,var=Var,es=Kes},Esp,St1};
- {Assoc,Exact} ->
- Kes1 = [#k_map_pair{key=K,val=V}||{_,{assoc,K,V}} <- Assoc],
- {Mvar,Em,St2} = force_atomic(#k_map{anno=A,op=assoc,var=Var,es=Kes1},St1),
- Kes2 = [#k_map_pair{key=K,val=V}||{_,{exact,K,V}} <- Exact],
- {#k_map{anno=A,op=exact,var=Mvar,es=Kes2},Esp ++ Em,St2}
-
+ map_split_pairs_1(A, Var, Pairs, Esp, St1).
+
+map_split_pairs_1(A, Map0, [{Op,Key,Val}|Pairs1]=Pairs0, Esp0, St0) ->
+ {Map1,Em,St1} = force_atomic(Map0, St0),
+ case Key of
+ #k_var{} ->
+ %% Don't combine variable keys with other keys.
+ Kes = [#k_map_pair{key=Key,val=Val}],
+ Map = #k_map{anno=A,op=Op,var=Map1,es=Kes},
+ map_split_pairs_1(A, Map, Pairs1, Esp0 ++ Em, St1);
+ _ ->
+ %% Literal key. Split off all literal keys.
+ {L,Pairs} = splitwith(fun({_,#k_var{},_}) -> false;
+ ({_,_,_}) -> true
+ end, Pairs0),
+ {Map,Esp,St2} = map_group_pairs(A, Map1, L, Esp0 ++ Em, St1),
+ map_split_pairs_1(A, Map, Pairs, Esp, St2)
+ end;
+map_split_pairs_1(_, Map, [], Esp, St0) ->
+ {Map,Esp,St0}.
+
+map_group_pairs(A, Var, Pairs0, Esp, St0) ->
+ Pairs = map_remove_dup_keys(Pairs0),
+ Assoc = [#k_map_pair{key=K,val=V} || {_,{assoc,K,V}} <- Pairs],
+ Exact = [#k_map_pair{key=K,val=V} || {_,{exact,K,V}} <- Pairs],
+ case {Assoc,Exact} of
+ {[_|_],[]} ->
+ {#k_map{anno=A,op=assoc,var=Var,es=Assoc},Esp,St0};
+ {[],[_|_]} ->
+ {#k_map{anno=A,op=exact,var=Var,es=Exact},Esp,St0};
+ {[_|_],[_|_]} ->
+ Map = #k_map{anno=A,op=assoc,var=Var,es=Assoc},
+ {Mvar,Em,St1} = force_atomic(Map, St0),
+ {#k_map{anno=A,op=exact,var=Mvar,es=Exact},Esp ++ Em,St1}
end.
-%% Group map by Assoc operations and Exact operations
-
-map_group_pairs(Es) ->
- Groups = dict:to_list(map_group_pairs(Es,dict:new())),
- partition(fun({_,{Op,_,_}}) -> Op =:= assoc end, Groups).
-
-map_group_pairs([{assoc,K,V}|Es0],Used0) ->
- Used1 = case map_key_is_used(K,Used0) of
- {ok, {assoc,_,_}} -> map_key_set_used(K,{assoc,K,V},Used0);
- {ok, {exact,_,_}} -> map_key_set_used(K,{exact,K,V},Used0);
- _ -> map_key_set_used(K,{assoc,K,V},Used0)
- end,
- map_group_pairs(Es0,Used1);
-map_group_pairs([{exact,K,V}|Es0],Used0) ->
- Used1 = case map_key_is_used(K,Used0) of
- {ok, {assoc,_,_}} -> map_key_set_used(K,{assoc,K,V},Used0);
- {ok, {exact,_,_}} -> map_key_set_used(K,{exact,K,V},Used0);
- _ -> map_key_set_used(K,{exact,K,V},Used0)
- end,
- map_group_pairs(Es0,Used1);
-map_group_pairs([],Used) ->
- Used.
+map_remove_dup_keys(Es) ->
+ dict:to_list(map_remove_dup_keys(Es, dict:new())).
-map_key_set_used(K,How,Used) ->
- dict:store(map_key_clean(K),How,Used).
-
-map_key_is_used(K,Used) ->
- dict:find(map_key_clean(K),Used).
+map_remove_dup_keys([{assoc,K0,V}|Es0],Used0) ->
+ K = map_key_clean(K0),
+ Op = case dict:find(K, Used0) of
+ {ok,{exact,_,_}} -> exact;
+ _ -> assoc
+ end,
+ Used1 = dict:store(K, {Op,K0,V}, Used0),
+ map_remove_dup_keys(Es0, Used1);
+map_remove_dup_keys([{exact,K0,V}|Es0],Used0) ->
+ K = map_key_clean(K0),
+ Op = case dict:find(K, Used0) of
+ {ok,{assoc,_,_}} -> assoc;
+ _ -> exact
+ end,
+ Used1 = dict:store(K, {Op,K0,V}, Used0),
+ map_remove_dup_keys(Es0, Used1);
+map_remove_dup_keys([], Used) -> Used.
-%% Be explicit instead of using set_kanno(K,[])
+%% Be explicit instead of using set_kanno(K, []).
map_key_clean(#k_var{name=V}) -> {var,V};
map_key_clean(#k_literal{val=V}) -> {lit,V};
map_key_clean(#k_int{val=V}) -> {lit,V};
@@ -661,12 +675,12 @@ atomic_bin([#c_bitstr{anno=A,val=E0,size=S0,unit=U0,type=T,flags=Fs0}|Es0],
{E,Ap1,St1} = atomic(E0, Sub, St0),
{S1,Ap2,St2} = atomic(S0, Sub, St1),
validate_bin_element_size(S1),
- U1 = core_lib:literal_value(U0),
- Fs1 = core_lib:literal_value(Fs0),
+ U1 = cerl:concrete(U0),
+ Fs1 = cerl:concrete(Fs0),
{Es,Ap3,St3} = atomic_bin(Es0, Sub, St2),
{#k_bin_seg{anno=A,size=S1,
unit=U1,
- type=core_lib:literal_value(T),
+ type=cerl:concrete(T),
flags=Fs1,
seg=E,next=Es},
Ap1++Ap2++Ap3,St3};
@@ -793,8 +807,8 @@ pattern_bin_1([#c_bitstr{anno=A,val=E0,size=S0,unit=U,type=T,flags=Fs}|Es0],
%% problems.
#k_atom{val=bad_size}
end,
- U0 = core_lib:literal_value(U),
- Fs0 = core_lib:literal_value(Fs),
+ U0 = cerl:concrete(U),
+ Fs0 = cerl:concrete(Fs),
%%ok= io:fwrite("~w: ~p~n", [?LINE,{B0,S,U0,Fs0}]),
{E,Osub1,St2} = pattern(E0, Isub0, Osub0, St1),
Isub1 = case E0 of
@@ -805,7 +819,7 @@ pattern_bin_1([#c_bitstr{anno=A,val=E0,size=S0,unit=U,type=T,flags=Fs}|Es0],
{Es,{Isub,Osub},St3} = pattern_bin_1(Es0, Isub1, Osub1, St2),
{#k_bin_seg{anno=A,size=S,
unit=U0,
- type=core_lib:literal_value(T),
+ type=cerl:concrete(T),
flags=Fs0,
seg=E,next=Es},
{Isub,Osub},St3};
diff --git a/lib/compiler/test/Makefile b/lib/compiler/test/Makefile
index 892a401c75..73d52a48bc 100644
--- a/lib/compiler/test/Makefile
+++ b/lib/compiler/test/Makefile
@@ -11,6 +11,7 @@ MODULES= \
beam_validator_SUITE \
beam_disasm_SUITE \
beam_except_SUITE \
+ beam_utils_SUITE \
bs_bincomp_SUITE \
bs_bit_binaries_SUITE \
bs_construct_SUITE \
@@ -40,6 +41,7 @@ NO_OPT= \
andor \
apply \
beam_except \
+ beam_utils \
bs_construct \
bs_match \
bs_utf \
@@ -59,6 +61,7 @@ NO_OPT= \
INLINE= \
andor \
apply \
+ beam_utils \
bs_bincomp \
bs_bit_binaries \
bs_construct \
diff --git a/lib/compiler/test/andor_SUITE.erl b/lib/compiler/test/andor_SUITE.erl
index b5408ecd8f..3199440d84 100644
--- a/lib/compiler/test/andor_SUITE.erl
+++ b/lib/compiler/test/andor_SUITE.erl
@@ -33,7 +33,7 @@ all() ->
[{group,p}].
groups() ->
- [{p,test_lib:parallel(),
+ [{p,[parallel],
[t_case,t_and_or,t_andalso,t_orelse,inside,overlap,
combined,in_case,before_and_inside_if]}].
@@ -173,7 +173,13 @@ t_and_or(Config) when is_list(Config) ->
true = (fun (X = true) when X or true or X -> true end)(True),
- ok.
+ Tuple = id({a,b}),
+ case Tuple of
+ {_,_} ->
+ {'EXIT',{badarg,_}} = (catch true and Tuple)
+ end,
+
+ ok.
t_andalso(Config) when is_list(Config) ->
Bs = [true,false],
diff --git a/lib/compiler/test/beam_utils_SUITE.erl b/lib/compiler/test/beam_utils_SUITE.erl
new file mode 100644
index 0000000000..d2e24cb5ae
--- /dev/null
+++ b/lib/compiler/test/beam_utils_SUITE.erl
@@ -0,0 +1,236 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(beam_utils_SUITE).
+
+-export([all/0,suite/0,groups/0,init_per_suite/1,end_per_suite/1,
+ init_per_group/2,end_per_group/2,
+ apply_fun/1,apply_mf/1,bs_init/1,bs_save/1,
+ is_not_killed/1,is_not_used_at/1,
+ select/1,y_catch/1]).
+-export([id/1]).
+
+suite() -> [{ct_hooks,[ts_install_cth]}].
+
+all() ->
+ test_lib:recompile(?MODULE),
+ [{group,p}].
+
+groups() ->
+ [{p,[parallel],
+ [apply_fun,
+ apply_mf,
+ bs_init,
+ bs_save,
+ is_not_killed,
+ is_not_used_at,
+ select,
+ y_catch
+ ]}].
+
+init_per_suite(Config) ->
+ Config.
+
+end_per_suite(_Config) ->
+ ok.
+
+init_per_group(_GroupName, Config) ->
+ Config.
+
+end_per_group(_GroupName, Config) ->
+ Config.
+
+apply_fun(_Config) ->
+ 3 = do_apply_fun(false, false),
+ 3 = do_apply_fun(false, true),
+ 3 = do_apply_fun(true, false),
+ 2 = do_apply_fun(true, true),
+ ok.
+
+do_apply_fun(X, Y) ->
+ F = fun(I) -> I+1 end,
+ Arg = case X andalso id(Y) of
+ true -> 1;
+ false -> 2
+ end,
+ F(Arg).
+
+apply_mf(_Config) ->
+ ok = do_apply_mf_used({a,b}, ?MODULE, id),
+ error = do_apply_mf_used([a], ?MODULE, id),
+ {'EXIT',{{case_clause,{[],b}},_}} = (catch do_apply_mf_used({[],b}, ?MODULE, id)),
+
+ error = do_apply_mf_killed({error,[a]}, ?MODULE, id),
+ ok = do_apply_mf_killed([b], ?MODULE, id),
+ {'EXIT',{{case_clause,{a,[b]}},_}} = (catch do_apply_mf_killed({a,[b]}, ?MODULE, id)),
+ {'EXIT',{{case_clause,{error,[]}},_}} = (catch do_apply_mf_killed({error,[]}, ?MODULE, id)),
+
+ ok.
+
+do_apply_mf_used(Arg, Mod, Func) ->
+ Res = case id(Arg) of
+ {Decoded,_} when Decoded =/= [] ->
+ ok;
+ List when is_list(List) ->
+ error
+ end,
+ Mod:Func(Res).
+
+do_apply_mf_killed(Arg, Mod, Func) ->
+ Res = case id(Arg) of
+ {Tag,Decoded} when Decoded =/= [], Tag =:= error ->
+ error;
+ List when is_list(List) ->
+ ok
+ end,
+ Mod:Func(Res).
+
+bs_init(_Config) ->
+ <<7>> = do_bs_init_1([?MODULE], 7),
+ error = do_bs_init_1([?MODULE], 0.0),
+ error = do_bs_init_1([?MODULE], -43),
+ error = do_bs_init_1([?MODULE], 42),
+
+ <<>> = do_bs_init_2([]),
+ <<0:32,((1 bsl 32)-1):32>> = do_bs_init_2([0,(1 bsl 32)-1]),
+ {'EXIT',{badarg,_}} = (catch do_bs_init_2([0.5])),
+ {'EXIT',{badarg,_}} = (catch do_bs_init_2([-1])),
+ {'EXIT',{badarg,_}} = (catch do_bs_init_2([1 bsl 32])),
+ ok.
+
+do_bs_init_1([?MODULE], Sz) ->
+ if
+ is_integer(Sz), Sz >= -42, Sz < 42 ->
+ id(<<Sz:8>>);
+ true ->
+ error
+ end.
+
+do_bs_init_2(SigNos) ->
+ << <<SigNo:32>> ||
+ SigNo <- SigNos,
+ (is_integer(SigNo) andalso SigNo >= 0 andalso SigNo < (1 bsl 32)) orelse
+ erlang:error(badarg)
+ >>.
+
+
+bs_save(_Config) ->
+ {a,30,<<>>} = do_bs_save(<<1:1,30:5>>),
+ {b,127,<<>>} = do_bs_save(<<1:1,31:5,0:1,127:7>>),
+ {c,127,<<>>} = do_bs_save(<<1:1,31:5,1:1,127:7>>),
+ {c,127,<<>>} = do_bs_save(<<0:1,31:5,1:1,127:7>>),
+ {d,1024,<<>>} = do_bs_save(<<0:1,31:5>>),
+ ok.
+
+do_bs_save(<<_:1, Tag:5, T/binary>>) when Tag < 31 ->
+ {a,Tag,T};
+do_bs_save(<<1:1, 31:5, 0:1, Tag:7, T/binary>>) ->
+ {b,Tag,T};
+do_bs_save(<<_:1, 31:5, 1:1, Tag:7, T/binary>>) ->
+ {c,Tag,T};
+do_bs_save(<<_:1, 31:5, T/binary>>) ->
+ {d,1024,T}.
+
+is_not_killed(_Config) ->
+ {Pid,Ref} = spawn_monitor(fun() -> exit(banan) end),
+ receive
+ {'DOWN', Ref, process, Pid, banan} ->
+ ok
+ end,
+ receive after 0 -> ok end.
+
+is_not_used_at(_Config) ->
+ {a,b} = do_is_not_used_at(a, [{a,b}]),
+ {a,b} = do_is_not_used_at(a, [x,{a,b}]),
+ {a,b} = do_is_not_used_at(a, [{x,y},{a,b}]),
+ none = do_is_not_used_at(z, [{a,b}]),
+ none = do_is_not_used_at(a, [x]),
+ none = do_is_not_used_at(a, [{x,y}]),
+ ok.
+
+do_is_not_used_at(Key, [P|Ps]) ->
+ if
+ tuple_size(P) >= 1, element(1, P) =:= Key ->
+ P;
+ true ->
+ do_is_not_used_at(Key, Ps)
+ end;
+do_is_not_used_at(_Key, []) -> none.
+
+-record(select, {fixed=false}).
+
+select(_Config) ->
+ a = do_select(#select{}, 0, 0),
+ b = do_select(#select{}, 0, 1),
+ c = do_select(#select{fixed=true}, 0, 0),
+ c = do_select(#select{fixed=true}, 0, 1),
+ ok.
+
+do_select(Head, OldSize, BSize) ->
+ Overwrite0 =
+ if
+ OldSize =:= BSize -> same;
+ true -> true
+ end,
+ Overwrite =
+ if
+ Head#select.fixed =/= false ->
+ false;
+ true ->
+ Overwrite0
+ end,
+ if
+ Overwrite =:= same ->
+ a;
+ Overwrite ->
+ b;
+ true ->
+ c
+ end.
+
+y_catch(_Config) ->
+ ok = try
+ do_y_catch(<<"<?xmlX">>, {state}),
+ failed
+ catch
+ throw:{<<"<?xmlX">>,{state}} ->
+ ok
+ end.
+
+do_y_catch(<<"<?xml",Rest0/binary>> = Bytes, State0) ->
+ {Rest1,State1} =
+ case do_y_catch_1(Rest0, State0) of
+ false ->
+ {Bytes,State0};
+ true ->
+ {_XmlAttributes, R, S} = do_y_catch_2(Rest0),
+ {R,S}
+ end,
+ case catch id({Rest1,State1}) of
+ Other ->
+ throw(Other)
+ end.
+
+do_y_catch_1(<<_,_/binary>>, _) ->
+ false.
+
+do_y_catch_2(_) -> {a,b,c}.
+
+
+%% The identity function.
+id(I) -> I.
diff --git a/lib/compiler/test/beam_validator_SUITE.erl b/lib/compiler/test/beam_validator_SUITE.erl
index 626f89ba7a..c441f9f284 100644
--- a/lib/compiler/test/beam_validator_SUITE.erl
+++ b/lib/compiler/test/beam_validator_SUITE.erl
@@ -21,16 +21,17 @@
-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
init_per_group/2,end_per_group/2,
init_per_testcase/2,end_per_testcase/2,
- beam_files/1,compiler_bug/1,stupid_but_valid/1,
+ compiler_bug/1,stupid_but_valid/1,
xrange/1,yrange/1,stack/1,call_last/1,merge_undefined/1,
uninit/1,unsafe_catch/1,
- dead_code/1,mult_labels/1,
+ dead_code/1,
overwrite_catchtag/1,overwrite_trytag/1,accessing_tags/1,bad_catch_try/1,
cons_guard/1,
freg_range/1,freg_uninit/1,freg_state/1,
- bin_match/1,bad_bin_match/1,bin_aligned/1,bad_dsetel/1,
+ bad_bin_match/1,bin_aligned/1,bad_dsetel/1,
state_after_fault_in_catch/1,no_exception_in_catch/1,
- undef_label/1,illegal_instruction/1,failing_gc_guard_bif/1]).
+ undef_label/1,illegal_instruction/1,failing_gc_guard_bif/1,
+ map_field_lists/1]).
-include_lib("test_server/include/test_server.hrl").
@@ -47,18 +48,19 @@ suite() -> [{ct_hooks,[ts_install_cth]}].
all() ->
test_lib:recompile(?MODULE),
- [beam_files,{group,p}].
+ [{group,p}].
groups() ->
[{p,test_lib:parallel(),
[compiler_bug,stupid_but_valid,xrange,
yrange,stack,call_last,merge_undefined,uninit,
- unsafe_catch,dead_code,mult_labels,
+ unsafe_catch,dead_code,
overwrite_catchtag,overwrite_trytag,accessing_tags,
bad_catch_try,cons_guard,freg_range,freg_uninit,
- freg_state,bin_match,bad_bin_match,bin_aligned,bad_dsetel,
+ freg_state,bad_bin_match,bin_aligned,bad_dsetel,
state_after_fault_in_catch,no_exception_in_catch,
- undef_label,illegal_instruction,failing_gc_guard_bif]}].
+ undef_label,illegal_instruction,failing_gc_guard_bif,
+ map_field_lists]}].
init_per_suite(Config) ->
Config.
@@ -72,33 +74,19 @@ init_per_group(_GroupName, Config) ->
end_per_group(_GroupName, Config) ->
Config.
-
-beam_files(Config) when is_list(Config) ->
- ?line DataDir = proplists:get_value(data_dir, Config),
- ?line Wc = filename:join([DataDir,"..","..","*","*.beam"]),
- %% Must have at least two files here, or there will be
- %% a grammatical error in the output of the io:format/2 call below. ;-)
- ?line [_,_|_] = Fs = filelib:wildcard(Wc),
- ?line io:format("~p files\n", [length(Fs)]),
- test_lib:p_run(fun do_beam_file/1, Fs).
-
-
-do_beam_file(F) ->
- case beam_validator:file(F) of
- ok ->
- ok;
- {error,Es} ->
- io:format("File: ~s", [F]),
- io:format("Error: ~p\n", [Es]),
- error
- end.
-
compiler_bug(Config) when is_list(Config) ->
%% Check that the compiler returns an error if we try to
%% assemble one of the bad '.S' files.
- ?line Data = ?config(data_dir, Config),
- ?line File = filename:join(Data, "stack"),
- ?line error = compile:file(File, [asm,report_errors,binary,time]),
+ Data = ?config(data_dir, Config),
+ File = filename:join(Data, "compiler_bug"),
+ error = compile:file(File, [from_asm,report_errors,time]),
+
+ %% Make sure that the error was reported by
+ %% the beam_validator module.
+ {error,
+ [{"compiler_bug",
+ [{beam_validator,_}]}],
+ []} = compile:file(File, [from_asm,return_errors,time]),
ok.
%% The following code is stupid but it should compile.
@@ -145,14 +133,14 @@ yrange(Config) when is_list(Config) ->
stack(Config) when is_list(Config) ->
Errors = do_val(stack, Config),
- ?line [{{t,a,2},{return,11,{stack_frame,2}}},
- {{t,b,2},{{deallocate,2},4,{allocated,none}}},
- {{t,c,2},{{deallocate,2},12,{allocated,none}}},
- {{t,d,2},
- {{allocate,2,2},5,{existing_stack_frame,{size,2}}}},
- {{t,e,2},{{deallocate,5},6,{allocated,2}}},
- {{t,bad_1,0},{{allocate_zero,2,10},4,{{x,9},not_live}}},
- {{t,bad_2,0},{{move,{y,0},{x,0}},5,{unassigned,{y,0}}}}] = Errors,
+ [{{t,a,2},{return,11,{stack_frame,2}}},
+ {{t,b,2},{{deallocate,2},4,{allocated,none}}},
+ {{t,bad_1,0},{{allocate_zero,2,10},4,{{x,9},not_live}}},
+ {{t,bad_2,0},{{move,{y,0},{x,0}},5,{unassigned,{y,0}}}},
+ {{t,c,2},{{deallocate,2},12,{allocated,none}}},
+ {{t,d,2},
+ {{allocate,2,2},5,{existing_stack_frame,{size,2}}}},
+ {{t,e,2},{{deallocate,5},6,{allocated,2}}}] = Errors,
ok.
call_last(Config) when is_list(Config) ->
@@ -166,10 +154,10 @@ call_last(Config) when is_list(Config) ->
merge_undefined(Config) when is_list(Config) ->
Errors = do_val(merge_undefined, Config),
- ?line [{{t,handle_call,2},
- {{call_ext,2,{extfunc,debug,filter,2}},
- 22,
- {uninitialized_reg,{y,0}}}}] = Errors,
+ [{{t,handle_call,2},
+ {{call_ext,2,{extfunc,debug,filter,2}},
+ 23,
+ {uninitialized_reg,{y,0}}}}] = Errors,
ok.
uninit(Config) when is_list(Config) ->
@@ -178,7 +166,7 @@ uninit(Config) when is_list(Config) ->
[{{t,sum_1,2},
{{move,{y,0},{x,0}},5,{uninitialized_reg,{y,0}}}},
{{t,sum_2,2},
- {{call,1,{f,10}},6,{uninitialized_reg,{y,0}}}},
+ {{call,1,{f,8}},6,{uninitialized_reg,{y,0}}}},
{{t,sum_3,2},
{{bif,'+',{f,0},[{x,0},{y,0}],{x,0}},
7,
@@ -199,10 +187,6 @@ dead_code(Config) when is_list(Config) ->
[] = do_val(dead_code, Config),
ok.
-mult_labels(Config) when is_list(Config) ->
- [] = do_val(erl_prim_loader, Config, ".beam"),
- ok.
-
overwrite_catchtag(Config) when is_list(Config) ->
Errors = do_val(overwrite_catchtag, Config),
?line
@@ -219,11 +203,10 @@ overwrite_trytag(Config) when is_list(Config) ->
accessing_tags(Config) when is_list(Config) ->
Errors = do_val(accessing_tags, Config),
- ?line
- [{{accessing_tags,foo,1},
- {{move,{y,0},{x,0}},6,{catchtag,_}}},
- {{accessing_tags,bar,1},
- {{move,{y,0},{x,0}},6,{trytag,_}}}] = Errors,
+ [{{accessing_tags,bar,1},
+ {{move,{y,0},{x,0}},6,{trytag,_}}},
+ {{accessing_tags,foo,1},
+ {{move,{y,0},{x,0}},6,{catchtag,_}}}] = Errors,
ok.
bad_catch_try(Config) when is_list(Config) ->
@@ -310,13 +293,6 @@ freg_state(Config) when is_list(Config) ->
{fclearerror,5,{bad_floating_point_state,cleared}}}] = Errors,
ok.
-bin_match(Config) when is_list(Config) ->
- Errors = do_val(bin_match, Config),
- ?line
- [{{t,t,1},{{bs_save,0},4,no_bs_match_state}},
- {{t,x,1},{{bs_restore,1},16,{no_save_point,1}}}] = Errors,
- ok.
-
bad_bin_match(Config) when is_list(Config) ->
[{{t,t,1},{return,5,{match_context,{x,0}}}}] =
do_val(bad_bin_match, Config),
@@ -340,20 +316,20 @@ bad_dsetel(Config) when is_list(Config) ->
?line
[{{t,t,1},
{{set_tuple_element,{x,1},{x,0},1},
- 15,
+ 17,
illegal_context_for_set_tuple_element}}] = Errors,
ok.
state_after_fault_in_catch(Config) when is_list(Config) ->
Errors = do_val(state_after_fault_in_catch, Config),
- [{{t,foo,1},
- {{move,{x,1},{x,0}},10,{uninitialized_reg,{x,1}}}},
- {{state_after_fault_in_catch,if_end,1},
+ [{{state_after_fault_in_catch,badmatch,1},
{{move,{x,1},{x,0}},9,{uninitialized_reg,{x,1}}}},
{{state_after_fault_in_catch,case_end,1},
{{move,{x,1},{x,0}},9,{uninitialized_reg,{x,1}}}},
- {{state_after_fault_in_catch,badmatch,1},
- {{move,{x,1},{x,0}},9,{uninitialized_reg,{x,1}}}}] = Errors,
+ {{state_after_fault_in_catch,if_end,1},
+ {{move,{x,1},{x,0}},9,{uninitialized_reg,{x,1}}}},
+ {{t,foo,1},
+ {{move,{x,1},{x,0}},10,{uninitialized_reg,{x,1}}}}] = Errors,
ok.
no_exception_in_catch(Config) when is_list(Config) ->
@@ -363,13 +339,46 @@ no_exception_in_catch(Config) when is_list(Config) ->
ok.
undef_label(Config) when is_list(Config) ->
- Errors = do_val(undef_label, Config),
+ M = {undef_label,
+ [{t,1}],
+ [],
+ [{function,t,1,2,
+ [{label,1},
+ {func_info,{atom,undef_label},{atom,t},1},
+ {label,2},
+ {test,is_eq_exact,{f,42},[{x,0},{atom,x}]},
+ {move,{atom,ok},{x,0}},
+ return]},
+ {function,x,1,17,
+ [{label,3},
+ {func_info,{atom,undef_label},{atom,x},1},
+ {label,4},
+ return]}],
+ 5},
+ Errors = beam_val(M),
[{{undef_label,t,1},{undef_labels,[42]}},
{{undef_label,x,1},{return,4,no_entry_label}}] = Errors,
ok.
illegal_instruction(Config) when is_list(Config) ->
- Errors = do_val(illegal_instruction, Config),
+ M = {illegal_instruction,
+ [{t,1},{x,1},{y,0}],
+ [],
+ [{function,t,1,2,
+ [{label,1},
+ {func_info,{atom,illegal_instruction},{atom,t},1},
+ {label,2},
+ {my_illegal_instruction,{x,0}},
+ return]},
+ {function,x,1,4,
+ [{label,3},
+ bad_func_info,
+ {label,4},
+ {my_illegal_instruction,{x,0}},
+ return]},
+ {function,y,0,17,[]}],
+ 5},
+ Errors = beam_val(M),
[{{illegal_instruction,t,1},
{{my_illegal_instruction,{x,0}},4,unknown_instruction}},
{{'_',x,1},{bad_func_info,1,illegal_instruction}},
@@ -407,19 +416,40 @@ process_request_foo(_) ->
process_request_bar(Pid, [Response]) when is_pid(Pid) ->
Response.
+map_field_lists(Config) ->
+ Errors = do_val(map_field_lists, Config),
+ [{{map_field_lists,x,1},
+ {{test,has_map_fields,{f,1},{x,0},
+ {list,[{atom,z},{atom,a}]}},
+ 5,
+ not_strict_order}},
+ {{map_field_lists,y,1},
+ {{test,has_map_fields,{f,3},{x,0},{list,[]}},
+ 5,
+ empty_field_list}}
+ ] = Errors.
%%%-------------------------------------------------------------------------
-do_val(Name, Config) ->
- do_val(Name, Config, ".S").
-
-do_val(Name, Config, Type) ->
- ?line Data = ?config(data_dir, Config),
- ?line File = filename:join(Data, atom_to_list(Name)++Type),
- ?line case beam_validator:file(File) of
- {error,Errors} ->
- ?line io:format("~p:~n~s",
- [File,beam_validator:format_error(Errors)]),
- Errors;
- ok -> []
- end.
+do_val(Mod, Config) ->
+ Data = ?config(data_dir, Config),
+ Base = atom_to_list(Mod),
+ File = filename:join(Data, Base),
+ case compile:file(File, [from_asm,no_postopt,return_errors]) of
+ {error,L,[]} ->
+ [{Base,Errors0}] = L,
+ Errors = [E || {beam_validator,E} <- Errors0],
+ _ = [io:put_chars(beam_validator:format_error(E)) ||
+ E <- Errors],
+ Errors;
+ {ok,Mod} ->
+ []
+ end.
+
+beam_val(M) ->
+ Name = atom_to_list(element(1, M)),
+ {error,[{Name,Errors0}]} = beam_validator:module(M, []),
+ Errors = [E || {beam_validator,E} <- Errors0],
+ _ = [io:put_chars(beam_validator:format_error(E)) ||
+ E <- Errors],
+ Errors.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/bad_dsetel.S b/lib/compiler/test/beam_validator_SUITE_data/bad_dsetel.S
index 279b2fa97f..9630d73a93 100644
--- a/lib/compiler/test/beam_validator_SUITE_data/bad_dsetel.S
+++ b/lib/compiler/test/beam_validator_SUITE_data/bad_dsetel.S
@@ -1,4 +1,4 @@
-{module, t}. %% version = 0
+{module, bad_dsetel}. %% version = 0
{exports, [{module_info,0},{module_info,1},{t,1}]}.
@@ -21,7 +21,9 @@
{move,{integer,3},{x,0}}.
{call_ext,3,{extfunc,erlang,setelement,3}}.
{test_heap,6,1}.
- {put_string,3,{string,"abc"},{x,1}}.
+ {put_list,{integer,99},nil,{x,1}}.
+ {put_list,{integer,98},{x,1},{x,1}}.
+ {put_list,{integer,97},{x,1},{x,1}}.
{set_tuple_element,{x,1},{x,0},1}.
{'%live',1}.
{deallocate,0}.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/bin_aligned.S b/lib/compiler/test/beam_validator_SUITE_data/bin_aligned.S
index 2f353fbd25..a59f7ccc03 100644
--- a/lib/compiler/test/beam_validator_SUITE_data/bin_aligned.S
+++ b/lib/compiler/test/beam_validator_SUITE_data/bin_aligned.S
@@ -1,4 +1,4 @@
-{module, t}. %% version = 0
+{module, bin_aligned}. %% version = 0
{exports, [{decode,1},{module_info,0},{module_info,1}]}.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/bin_match.S b/lib/compiler/test/beam_validator_SUITE_data/bin_match.S
deleted file mode 100644
index 96df0f7933..0000000000
--- a/lib/compiler/test/beam_validator_SUITE_data/bin_match.S
+++ /dev/null
@@ -1,64 +0,0 @@
-{module, bin_match}. %% version = 0
-
-{exports, [{t,1}]}.
-
-{attributes, []}.
-
-{labels, 8}.
-
-
-{function, t, 1, 2}.
- {label,1}.
- {func_info,{atom,t},{atom,t},1}.
- {label,2}.
-%% {test,bs_start_match,{f,1},[{x,0}]}.
- {bs_save,0}.
- {test,bs_get_integer,
- {f,3},
- [{integer,8},1,{field_flags,[aligned,unsigned,big]},{x,1}]}.
- {test,bs_get_integer,
- {f,3},
- [{integer,8},1,{field_flags,[aligned,unsigned,big]},{x,2}]}.
- {test,bs_test_tail,{f,3},[0]}.
- {test_heap,3,3}.
- {put_tuple,2,{x,0}}.
- {put,{x,1}}.
- {put,{x,2}}.
- {'%live',1}.
- return.
- {label,3}.
- {bs_restore,0}.
- {test,bs_get_integer,
- {f,1},
- [{integer,32},1,{field_flags,[aligned,unsigned,big]},{x,1}]}.
- {test,bs_test_tail,{f,1},[0]}.
- {move,{x,1},{x,0}}.
- return.
-
-{function, x, 1, 5}.
- {label,4}.
- {func_info,{atom,t},{atom,x},1}.
- {label,5}.
- {test,bs_start_match,{f,4},[{x,0}]}.
- {bs_save,0}.
- {test,bs_get_integer,
- {f,6},
- [{integer,8},1,{field_flags,[aligned,unsigned,big]},{x,1}]}.
- {test,bs_get_integer,
- {f,6},
- [{integer,8},1,{field_flags,[aligned,unsigned,big]},{x,2}]}.
- {test,bs_test_tail,{f,6},[0]}.
- {test_heap,3,3}.
- {put_tuple,2,{x,0}}.
- {put,{x,1}}.
- {put,{x,2}}.
- {'%live',1}.
- return.
- {label,6}.
- {bs_restore,1}.
- {test,bs_get_integer,
- {f,4},
- [{integer,32},1,{field_flags,[aligned,unsigned,big]},{x,1}]}.
- {test,bs_test_tail,{f,4},[0]}.
- {move,{x,1},{x,0}}.
- return.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/compiler_bug.S b/lib/compiler/test/beam_validator_SUITE_data/compiler_bug.S
new file mode 100644
index 0000000000..ba27bf5c47
--- /dev/null
+++ b/lib/compiler/test/beam_validator_SUITE_data/compiler_bug.S
@@ -0,0 +1,38 @@
+{module, compiler_bug}. %% version = 0
+
+{exports, [{module_info,0},{module_info,1},{sum,2}]}.
+
+{attributes, []}.
+
+{labels, 7}.
+
+
+{function, sum, 2, 2}.
+ {label,1}.
+ {line,[{location,"compiler_bug.erl",4}]}.
+ {func_info,{atom,compiler_bug},{atom,sum},2}.
+ {label,2}.
+ {line,[{location,"compiler_bug.erl",5}]}.
+ {gc_bif,'+',{f,0},2,[{y,0},{y,1}],{x,0}}.
+ return.
+
+
+{function, module_info, 0, 4}.
+ {label,3}.
+ {line,[]}.
+ {func_info,{atom,compiler_bug},{atom,module_info},0}.
+ {label,4}.
+ {move,{atom,compiler_bug},{x,0}}.
+ {line,[]}.
+ {call_ext_only,1,{extfunc,erlang,get_module_info,1}}.
+
+
+{function, module_info, 1, 6}.
+ {label,5}.
+ {line,[]}.
+ {func_info,{atom,compiler_bug},{atom,module_info},1}.
+ {label,6}.
+ {move,{x,0},{x,1}}.
+ {move,{atom,compiler_bug},{x,0}}.
+ {line,[]}.
+ {call_ext_only,2,{extfunc,erlang,get_module_info,2}}.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/dead_code.S b/lib/compiler/test/beam_validator_SUITE_data/dead_code.S
index f964f98fba..c114664ba0 100644
--- a/lib/compiler/test/beam_validator_SUITE_data/dead_code.S
+++ b/lib/compiler/test/beam_validator_SUITE_data/dead_code.S
@@ -1,10 +1,10 @@
{module, dead_code}. %% version = 0
-{exports, [{execute,0},{module_info,0},{module_info,1}]}.
+{exports, [{execute,0}]}.
{attributes, []}.
-{labels, 10}.
+{labels, 6}.
{function, execute, 0, 2}.
@@ -12,7 +12,6 @@
{func_info,{atom,dead_code},{atom,execute},0}.
{label,2}.
{allocate,0,0}.
- {'%live',0}.
{call_ext,0,{extfunc,foo,fie,0}}.
{test,is_ne,{f,4},[{x,0},{integer,0}]}.
{test,is_ne,{f,4},[{x,0},{integer,1}]}.
@@ -22,27 +21,7 @@
{case_end,{x,0}}.
{label,4}.
{move,{atom,ok},{x,0}}.
- {'%live',1}.
{deallocate,0}.
return.
- {'%','Moved code'}.
{label,5}.
{case_end,{x,0}}.
-
-
-{function, module_info, 0, 7}.
- {label,6}.
- {func_info,{atom,dead_code},{atom,module_info},0}.
- {label,7}.
- {move,nil,{x,0}}.
- {'%live',1}.
- return.
-
-
-{function, module_info, 1, 9}.
- {label,8}.
- {func_info,{atom,dead_code},{atom,module_info},1}.
- {label,9}.
- {move,nil,{x,0}}.
- {'%live',1}.
- return.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/erl_prim_loader.beam b/lib/compiler/test/beam_validator_SUITE_data/erl_prim_loader.beam
deleted file mode 100644
index dd58a88e42..0000000000
--- a/lib/compiler/test/beam_validator_SUITE_data/erl_prim_loader.beam
+++ /dev/null
Binary files differ
diff --git a/lib/compiler/test/beam_validator_SUITE_data/freg_range.S b/lib/compiler/test/beam_validator_SUITE_data/freg_range.S
index ee583a923e..b3ebff3ade 100644
--- a/lib/compiler/test/beam_validator_SUITE_data/freg_range.S
+++ b/lib/compiler/test/beam_validator_SUITE_data/freg_range.S
@@ -1,10 +1,10 @@
{module, freg_range}. %% version = 0
-{exports, [{module_info,0},{module_info,1},{prod,2},{sum,2},{sum_prod,3}]}.
+{exports, [{sum_1,2},{sum_2,2},{sum_3,2},{sum_4,2}]}.
{attributes, []}.
-{labels, 8}.
+{labels, 9}.
{function, sum_1, 2, 2}.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/freg_state.S b/lib/compiler/test/beam_validator_SUITE_data/freg_state.S
index ff4d7548ae..7466763482 100644
--- a/lib/compiler/test/beam_validator_SUITE_data/freg_state.S
+++ b/lib/compiler/test/beam_validator_SUITE_data/freg_state.S
@@ -1,6 +1,6 @@
{module, freg_state}. %% version = 0
-{exports, []}.
+{exports, [{sum_1,2},{sum_2,2},{sum_3,2},{sum_4,2},{sum_5,2}]}.
{attributes, []}.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/freg_uninit.S b/lib/compiler/test/beam_validator_SUITE_data/freg_uninit.S
index f8d805d9ec..71e833446a 100644
--- a/lib/compiler/test/beam_validator_SUITE_data/freg_uninit.S
+++ b/lib/compiler/test/beam_validator_SUITE_data/freg_uninit.S
@@ -1,10 +1,10 @@
{module, freg_uninit}. %% version = 0
-{exports, []}.
+{exports, [{sum_1,2},{sum_2,2}]}.
{attributes, []}.
-{labels, 8}.
+{labels, 7}.
{function, sum_1, 2, 2}.
@@ -14,7 +14,6 @@
{fconv,{x,0},{fr,0}}.
fclearerror.
{bif,fadd,{f,0},[{fr,0},{fr,1}],{fr,0}}.
- {'%live',1}.
return.
@@ -26,7 +25,12 @@
{fconv,{x,1},{fr,1}}.
fclearerror.
{fcheckerror,{f,0}}.
- {call,2,{f,8}}.
+ {call,2,{f,6}}.
{bif,fadd,{f,0},[{fr,0},{fr,1}],{fr,0}}.
- {'%live',1}.
+ return.
+
+{function, foo, 2, 6}.
+ {label,5}.
+ {func_info,{atom,t},{atom,foo},2}.
+ {label,6}.
return.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/illegal_instruction.S b/lib/compiler/test/beam_validator_SUITE_data/illegal_instruction.S
deleted file mode 100644
index d6e92abc71..0000000000
--- a/lib/compiler/test/beam_validator_SUITE_data/illegal_instruction.S
+++ /dev/null
@@ -1,26 +0,0 @@
-{module, illegal_instruction}. %% version = 0
-
-{exports, []}.
-
-{attributes, []}.
-
-{labels, 7}.
-
-
-{function, t, 1, 2}.
- {label,1}.
- {func_info,{atom,illegal_instruction},{atom,t},1}.
- {label,2}.
- {my_illegal_instruction,{x,0}}.
- return.
-
-
-{function, x, 1, 4}.
- {label,3}.
- bad_func_info.
- {label,4}.
- {my_illegal_instruction,{x,0}}.
- return.
-
-{function, y, 0, 17}.
- \ No newline at end of file
diff --git a/lib/compiler/test/beam_validator_SUITE_data/map_field_lists.S b/lib/compiler/test/beam_validator_SUITE_data/map_field_lists.S
new file mode 100644
index 0000000000..9af68c82d4
--- /dev/null
+++ b/lib/compiler/test/beam_validator_SUITE_data/map_field_lists.S
@@ -0,0 +1,29 @@
+{module, map_field_lists}. %% version = 0
+
+{exports, [{x,1},{y,1}]}.
+
+{attributes, []}.
+
+{labels, 5}.
+
+
+{function, x, 1, 2}.
+ {label,1}.
+ {line,[{location,"map_field_lists.erl",4}]}.
+ {func_info,{atom,map_field_lists},{atom,x},1}.
+ {label,2}.
+ {test,is_map,{f,1},[{x,0}]}.
+ {test,has_map_fields,{f,1},{x,0},{list,[{atom,z},{atom,a}]}}.
+ {move,{atom,ok},{x,0}}.
+ return.
+
+
+{function, y, 1, 4}.
+ {label,3}.
+ {line,[{location,"map_field_lists.erl",7}]}.
+ {func_info,{atom,map_field_lists},{atom,y},1}.
+ {label,4}.
+ {test,is_map,{f,3},[{x,0}]}.
+ {test,has_map_fields,{f,3},{x,0},{list,[]}}.
+ {move,{atom,ok},{x,0}}.
+ return.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/merge_undefined.S b/lib/compiler/test/beam_validator_SUITE_data/merge_undefined.S
index 3d76127824..481d55045d 100644
--- a/lib/compiler/test/beam_validator_SUITE_data/merge_undefined.S
+++ b/lib/compiler/test/beam_validator_SUITE_data/merge_undefined.S
@@ -22,7 +22,8 @@
{label,4}.
{allocate_heap,1,6,2}.
{move,{x,1},{y,0}}.
- {put_string,2,{string,"~p"},{x,0}}.
+ {put_list,{integer,112},nil,{x,0}}.
+ {put_list,{integer,126},{x,0},{x,0}}.
{put_list,{y,0},nil,{x,1}}.
{'%live',2}.
{call_ext,2,{extfunc,io,format,2}}.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/no_exception_in_catch.S b/lib/compiler/test/beam_validator_SUITE_data/no_exception_in_catch.S
index e08a718a39..1a5b417a5f 100644
--- a/lib/compiler/test/beam_validator_SUITE_data/no_exception_in_catch.S
+++ b/lib/compiler/test/beam_validator_SUITE_data/no_exception_in_catch.S
@@ -26,7 +26,7 @@
{call_ext,1,{extfunc,erlang,erase,1}}.
{move,{atom,nested},{x,0}}.
{call_ext,1,{extfunc,erlang,erase,1}}.
- {bif,self,nofail,[],{x,0}}.
+ {bif,self,{f,0},[],{x,0}}.
{'try',{y,8},{f,13}}.
{'try',{y,7},{f,11}}.
{'try',{y,6},{f,9}}.
@@ -34,7 +34,7 @@
%% Because the following instructions can't possibly throw an exception,
%% label 7 used to get no state. Now the try_end itself will save the state.
{move,{x,0},{y,4}}.
- {bif,self,nofail,[],{x,0}}.
+ {bif,self,{f,0},[],{x,0}}.
{'%live',1}.
{try_end,{y,5}}.
{test,is_eq_exact,{f,15},[{x,0},{y,4}]}.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/stack.S b/lib/compiler/test/beam_validator_SUITE_data/stack.S
index 244c22a2f9..e4356a9d00 100644
--- a/lib/compiler/test/beam_validator_SUITE_data/stack.S
+++ b/lib/compiler/test/beam_validator_SUITE_data/stack.S
@@ -1,10 +1,10 @@
{module, stack}. %% version = 0
-{exports, [{a,2},{b,2},{c,2},{d,2},{e,2}]}.
+{exports, [{a,2},{b,2},{c,2},{d,2},{e,2},{bad_1,0},{bad_2,0},{foo,0}]}.
{attributes, []}.
-{labels, 21}.
+{labels, 17}.
{function, a, 2, 2}.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/undef_label.S b/lib/compiler/test/beam_validator_SUITE_data/undef_label.S
deleted file mode 100644
index dd29066bf4..0000000000
--- a/lib/compiler/test/beam_validator_SUITE_data/undef_label.S
+++ /dev/null
@@ -1,22 +0,0 @@
-{module, undef_label}. %% version = 0
-
-{exports, []}.
-
-{attributes, []}.
-
-{labels, 7}.
-
-
-{function, t, 1, 2}.
- {label,1}.
- {func_info,{atom,undef_label},{atom,t},1}.
- {label,2}.
- {test,is_eq_exact,{f,42},[{x,0},{atom,x}]}.
- {move,{atom,ok},{x,0}}.
- return.
-
-{function, x, 1, 17}.
- {label,3}.
- {func_info,{atom,undef_label},{atom,x},1}.
- {label,4}.
- return.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/uninit.S b/lib/compiler/test/beam_validator_SUITE_data/uninit.S
index 1a45c31411..9a66f4f7d6 100644
--- a/lib/compiler/test/beam_validator_SUITE_data/uninit.S
+++ b/lib/compiler/test/beam_validator_SUITE_data/uninit.S
@@ -1,9 +1,11 @@
{module, uninit}. %% version = 0
-{exports, []}.
+{exports, [{sum_1,2},{sum_2,2},{sum_3,2}]}.
{attributes, []}.
+{labels, 9}.
+
{function, sum_1, 2, 2}.
{label,1}.
{func_info,{atom,t},{atom,sum_1},2}.
@@ -11,7 +13,7 @@
{allocate,1,2}.
{move,{y,0},{x,0}}.
{'%live',1}.
- {call,1,{f,10}}.
+ {call,1,{f,8}}.
{bif,'+',{f,0},[{x,0},{y,0}],{x,0}}.
{'%live',1}.
{deallocate,1}.
@@ -23,7 +25,7 @@
{label,4}.
{allocate,1,2}.
{'%live',1}.
- {call,1,{f,10}}.
+ {call,1,{f,8}}.
{bif,'+',{f,0},[{x,0},{y,0}],{x,0}}.
{'%live',1}.
{deallocate,1}.
@@ -35,14 +37,14 @@
{label,6}.
{allocate_zero,1,2}.
{'%live',1}.
- {call,1,{f,10}}.
+ {call,1,{f,8}}.
{bif,'+',{f,0},[{x,0},{y,0}],{x,0}}.
{'%live',1}.
{deallocate,1}.
return.
-{function, id, 1, 10}.
- {label,9}.
+{function, id, 1, 8}.
+ {label,7}.
{func_info,{atom,t},{atom,id},1}.
- {label,10}.
+ {label,8}.
return.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/xrange.S b/lib/compiler/test/beam_validator_SUITE_data/xrange.S
index 3abbdffbc2..c6f20288f7 100644
--- a/lib/compiler/test/beam_validator_SUITE_data/xrange.S
+++ b/lib/compiler/test/beam_validator_SUITE_data/xrange.S
@@ -1,10 +1,10 @@
{module, xrange}. %% version = 0
-{exports, [{module_info,0},{module_info,1},{prod,2},{sum,2},{sum_prod,3}]}.
+{exports, [{sum_1,2},{sum_2,2},{sum_3,2},{sum_4,2}]}.
{attributes, []}.
-{labels, 8}.
+{labels, 9}.
{function, sum_1, 2, 2}.
diff --git a/lib/compiler/test/bs_bit_binaries_SUITE.erl b/lib/compiler/test/bs_bit_binaries_SUITE.erl
index 8609a490f5..2433e7621e 100644
--- a/lib/compiler/test/bs_bit_binaries_SUITE.erl
+++ b/lib/compiler/test/bs_bit_binaries_SUITE.erl
@@ -37,7 +37,7 @@ all() ->
[{group,p}].
groups() ->
- [{p,test_lib:parallel(),
+ [{p,[parallel],
[misc,horrid_match,test_bitstr,test_bit_size,
asymmetric_tests,big_asymmetric_tests,
binary_to_and_from_list,big_binary_to_and_from_list,
diff --git a/lib/compiler/test/bs_construct_SUITE.erl b/lib/compiler/test/bs_construct_SUITE.erl
index ce39de2a82..9df874c387 100644
--- a/lib/compiler/test/bs_construct_SUITE.erl
+++ b/lib/compiler/test/bs_construct_SUITE.erl
@@ -39,7 +39,7 @@ all() ->
[{group,p}].
groups() ->
- [{p,test_lib:parallel(),
+ [{p,[parallel],
[two,test1,fail,float_bin,in_guard,in_catch,
nasty_literals,side_effect,opt,otp_7556,float_arith,
otp_8054]}].
diff --git a/lib/compiler/test/bs_match_SUITE.erl b/lib/compiler/test/bs_match_SUITE.erl
index 149b9bbb8f..f7af56afcc 100644
--- a/lib/compiler/test/bs_match_SUITE.erl
+++ b/lib/compiler/test/bs_match_SUITE.erl
@@ -34,7 +34,7 @@
otp_7188/1,otp_7233/1,otp_7240/1,otp_7498/1,
match_string/1,zero_width/1,bad_size/1,haystack/1,
cover_beam_bool/1,matched_out_size/1,follow_fail_branch/1,
- no_partition/1,calling_a_binary/1]).
+ no_partition/1,calling_a_binary/1,binary_in_map/1]).
-export([coverage_id/1,coverage_external_ignore/2]).
@@ -48,7 +48,7 @@ all() ->
[{group,p}].
groups() ->
- [{p,test_lib:parallel(),
+ [{p,[parallel],
[fun_shadow,int_float,otp_5269,null_fields,wiger,
bin_tail,save_restore,shadowed_size_var,
partitioned_bs_match,function_clause,unit,
@@ -59,7 +59,7 @@ groups() ->
matching_and_andalso,otp_7188,otp_7233,otp_7240,
otp_7498,match_string,zero_width,bad_size,haystack,
cover_beam_bool,matched_out_size,follow_fail_branch,
- no_partition,calling_a_binary]}].
+ no_partition,calling_a_binary,binary_in_map]}].
init_per_suite(Config) ->
@@ -368,11 +368,20 @@ partitioned_bs_match_3(Var, <<_>>) -> Var;
partitioned_bs_match_3(1, 2) -> ok.
function_clause(Config) when is_list(Config) ->
- ?line ok = function_clause_1(<<0,7,0,7,42>>),
- ?line fc(function_clause_1, [<<0,1,2,3>>],
- catch function_clause_1(<<0,1,2,3>>)),
- ?line fc(function_clause_1, [<<0,1,2,3>>],
- catch function_clause_1(<<0,7,0,1,2,3>>)),
+ ok = function_clause_1(<<0,7,0,7,42>>),
+ fc(function_clause_1, [<<0,1,2,3>>],
+ catch function_clause_1(<<0,1,2,3>>)),
+ fc(function_clause_1, [<<0,1,2,3>>],
+ catch function_clause_1(<<0,7,0,1,2,3>>)),
+
+ ok = function_clause_2(<<0,7,0,7,42>>),
+ ok = function_clause_2(<<255>>),
+ ok = function_clause_2(<<13:4>>),
+ fc(function_clause_2, [<<0,1,2,3>>],
+ catch function_clause_2(<<0,1,2,3>>)),
+ fc(function_clause_2, [<<0,1,2,3>>],
+ catch function_clause_2(<<0,7,0,1,2,3>>)),
+
ok.
function_clause_1(<<0:8,7:8,T/binary>>) ->
@@ -380,6 +389,13 @@ function_clause_1(<<0:8,7:8,T/binary>>) ->
function_clause_1(<<_:8>>) ->
ok.
+function_clause_2(<<0:8,7:8,T/binary>>) ->
+ function_clause_2(T);
+function_clause_2(<<_:8>>) ->
+ ok;
+function_clause_2(<<_:4>>) ->
+ ok.
+
unit(Config) when is_list(Config) ->
?line 42 = peek1(<<42>>),
?line 43 = peek1(<<43,1,2>>),
@@ -1189,6 +1205,26 @@ call_binary(<<>>, Acc) ->
call_binary(<<H,T/bits>>, Acc) ->
T(<<Acc/binary,H>>).
+binary_in_map(Config) when is_list(Config) ->
+ ok = match_binary_in_map(#{key => <<42:8>>}),
+ {'EXIT',{{badmatch,#{key := 1}},_}} =
+ (catch match_binary_in_map(#{key => 1})),
+ {'EXIT',{{badmatch,#{key := <<1023:16>>}},_}} =
+ (catch match_binary_in_map(#{key => <<1023:16>>})),
+ {'EXIT',{{badmatch,#{key := <<1:8>>}},_}} =
+ (catch match_binary_in_map(#{key => <<1:8>>})),
+ {'EXIT',{{badmatch,not_a_map},_}} =
+ (catch match_binary_in_map(not_a_map)),
+ ok.
+
+match_binary_in_map(Map) ->
+ case 8 of
+ N ->
+ #{key := <<42:N>>} = Map,
+ ok
+ end.
+
+
check(F, R) ->
R = F().
diff --git a/lib/compiler/test/compilation_SUITE.erl b/lib/compiler/test/compilation_SUITE.erl
index 8711f35e8e..296774e083 100644
--- a/lib/compiler/test/compilation_SUITE.erl
+++ b/lib/compiler/test/compilation_SUITE.erl
@@ -611,12 +611,10 @@ otp_7345(Config) when is_list(Config) ->
otp_7345(ObjRef, _RdEnv, Args) ->
Cid = ObjRef#contextId.cid,
- _DpRef =
- #dpRef{cid = Cid,
+ _ = #dpRef{cid = Cid,
ms_device_context_id = cid_id,
tlli = #ptmsi{value = 0}},
- _QosProfile =
- #qosProfileBssgp{peak_bit_rate_msb = 0,
+ _ = #qosProfileBssgp{peak_bit_rate_msb = 0,
peak_bit_rate_lsb = 80,
t_a_precedence = 49},
[Cpdu|_] = Args,
diff --git a/lib/compiler/test/compile_SUITE.erl b/lib/compiler/test/compile_SUITE.erl
index 128291dc67..1c96abe017 100644
--- a/lib/compiler/test/compile_SUITE.erl
+++ b/lib/compiler/test/compile_SUITE.erl
@@ -30,7 +30,7 @@
other_output/1, encrypted_abstr/1,
bad_record_use1/1, bad_record_use2/1, strict_record/1,
missing_testheap/1, cover/1, env/1, core/1, asm/1,
- sys_pre_attributes/1]).
+ sys_pre_attributes/1, dialyzer/1]).
-export([init/3]).
@@ -47,7 +47,7 @@ all() ->
other_output, encrypted_abstr,
{group, bad_record_use}, strict_record,
missing_testheap, cover, env, core, asm,
- sys_pre_attributes].
+ sys_pre_attributes, dialyzer].
groups() ->
[{bad_record_use, [],
@@ -748,42 +748,65 @@ env_1(Simple, Target) ->
%% compile the generated Core Erlang files.
core(Config) when is_list(Config) ->
- ?line Dog = test_server:timetrap(test_server:minutes(5)),
- ?line PrivDir = ?config(priv_dir, Config),
- ?line Outdir = filename:join(PrivDir, "core"),
- ?line ok = file:make_dir(Outdir),
+ PrivDir = ?config(priv_dir, Config),
+ Outdir = filename:join(PrivDir, "core"),
+ ok = file:make_dir(Outdir),
- ?line Wc = filename:join(filename:dirname(code:which(?MODULE)), "*.beam"),
- ?line TestBeams = filelib:wildcard(Wc),
- ?line Abstr = [begin {ok,{Mod,[{abstract_code,
+ Wc = filename:join(filename:dirname(code:which(?MODULE)), "*.beam"),
+ TestBeams = filelib:wildcard(Wc),
+ Abstr = [begin {ok,{Mod,[{abstract_code,
{raw_abstract_v1,Abstr}}]}} =
beam_lib:chunks(Beam, [abstract_code]),
{Mod,Abstr} end || Beam <- TestBeams],
- ?line Res = test_lib:p_run(fun(F) -> do_core(F, Outdir) end, Abstr),
- ?line test_server:timetrap_cancel(Dog),
- Res.
-
+ test_lib:p_run(fun(F) -> do_core(F, Outdir) end, Abstr).
do_core({M,A}, Outdir) ->
try
- {ok,M,Core} = compile:forms(A, [to_core,report]),
- CoreFile = filename:join(Outdir, atom_to_list(M)++".core"),
- CorePP = core_pp:format(Core),
- ok = file:write_file(CoreFile, CorePP),
- case compile:file(CoreFile, [clint,from_core,binary]) of
- {ok,M,_} ->
- ok = file:delete(CoreFile);
- Other ->
- io:format("*** core_lint failure '~p' for ~s\n",
- [Other,CoreFile]),
- error
- end
- catch Class:Error ->
+ do_core_1(M, A, Outdir)
+ catch
+ throw:{error,Error} ->
+ io:format("*** compilation failure '~p' for module ~s\n",
+ [Error,M]),
+ error;
+ Class:Error ->
io:format("~p: ~p ~p\n~p\n",
[M,Class,Error,erlang:get_stacktrace()]),
error
end.
+do_core_1(M, A, Outdir) ->
+ {ok,M,Core0} = compile:forms(A, [to_core]),
+ CoreFile = filename:join(Outdir, atom_to_list(M)++".core"),
+ CorePP = core_pp:format(Core0),
+ ok = file:write_file(CoreFile, CorePP),
+
+ %% Parse the .core file and return the result as Core Erlang Terms.
+ Core = case compile:file(CoreFile, [report_errors,from_core,no_copt,to_core,binary]) of
+ {ok,M,Core1} -> Core1;
+ Other -> throw({error,Other})
+ end,
+ ok = file:delete(CoreFile),
+
+ %% Compile as usual (including optimizations).
+ compile_forms(Core, [clint,from_core,binary]),
+
+ %% Don't optimize to test that we are not dependent
+ %% on the Core Erlang optimization passes.
+ %% (Example of a previous bug: The core_parse pass
+ %% would not turn map literals into #c_literal{}
+ %% records; if sys_core_fold was run it would fix
+ %% that; if sys_core_fold was not run v3_kernel would
+ %% crash.)
+ compile_forms(Core, [clint,from_core,no_copt,binary]),
+
+ ok.
+
+compile_forms(Forms, Opts) ->
+ case compile:forms(Forms, [report_errors|Opts]) of
+ {ok,[],_} -> ok;
+ Other -> throw({error,Other})
+ end.
+
%% Compile to Beam assembly language (.S) and then try to
%% run .S through the compiler again.
@@ -854,6 +877,20 @@ sys_pre_attributes(Config) ->
[report,verbose]),
ok.
+%% Test the dialyzer option to cover more code.
+dialyzer(Config) ->
+ Priv = ?config(priv_dir, Config),
+ file:set_cwd(?config(data_dir, Config)),
+ Opts = [{outdir,Priv},report_errors],
+ M = dialyzer_test,
+ {ok,M} = c:c(M, [dialyzer|Opts]),
+ [{a,b,c}] = M:M(),
+
+ %% Cover huge line numbers without the 'dialyzer' option.
+ {ok,M} = c:c(M, Opts),
+ [{a,b,c}] = M:M(),
+ ok.
+
%%%
%%% Utilities.
%%%
diff --git a/lib/compiler/test/compile_SUITE_data/dialyzer_test.erl b/lib/compiler/test/compile_SUITE_data/dialyzer_test.erl
new file mode 100644
index 0000000000..ed65ff9c43
--- /dev/null
+++ b/lib/compiler/test/compile_SUITE_data/dialyzer_test.erl
@@ -0,0 +1,39 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+-module(dialyzer_test).
+-export([?MODULE/0,turtle/0,test/1,huge/1]).
+
+-record(turtle, {a,b,c}).
+-record(tortoise, {a,b,c}).
+
+?MODULE() ->
+ [{a,b,c}].
+
+turtle() ->
+ #turtle{a=1,b=2,c=3}.
+
+test(T) ->
+ {T#tortoise.a,T#tortoise.b}.
+
+-file("dialyzer_test", 100000000).
+
+huge(X) ->
+ #turtle{a=42,b=100,c=511},
+ X#tortoise.a.
diff --git a/lib/compiler/test/core_SUITE_data/map_core_test.core b/lib/compiler/test/core_SUITE_data/map_core_test.core
index 2aa853d450..a75f6cf24f 100644
--- a/lib/compiler/test/core_SUITE_data/map_core_test.core
+++ b/lib/compiler/test/core_SUITE_data/map_core_test.core
@@ -7,11 +7,11 @@ module 'map_core_test' ['map_core_test'/0,
fun () ->
let <_cor0> =
%% Line 15
- ~{::<'check','ok'>,::<1337,#{#<104>(8,1,'integer',['unsigned'|['big']]),
+ ~{'check'=>'ok',1337=>#{#<104>(8,1,'integer',['unsigned'|['big']]),
#<101>(8,1,'integer',['unsigned'|['big']]),
#<108>(8,1,'integer',['unsigned'|['big']]),
#<108>(8,1,'integer',['unsigned'|['big']]),
- #<111>(8,1,'integer',['unsigned'|['big']])}#>,::<'val',0>}~
+ #<111>(8,1,'integer',['unsigned'|['big']])}#,'val'=>0}~
in let <M> =
%% Line 15
apply 'id'/1
@@ -23,7 +23,7 @@ module 'map_core_test' ['map_core_test'/0,
in %% Line 16
case apply 'call'/2
(M, _cor2) of
- <~{~<1337,#{#<104>(8,1,'integer',['unsigned'|['big']]),
+ <~{1337:=#{#<104>(8,1,'integer',['unsigned'|['big']]),
#<101>(8,1,'integer',['unsigned'|['big']]),
#<108>(8,1,'integer',['unsigned'|['big']]),
#<108>(8,1,'integer',['unsigned'|['big']]),
@@ -39,7 +39,7 @@ module 'map_core_test' ['map_core_test'/0,
#<32>(8,1,'integer',['unsigned'|['big']]),
#<53>(8,1,'integer',['unsigned'|['big']]),
#<32>(8,1,'integer',['unsigned'|['big']]),
- #<54>(8,1,'integer',['unsigned'|['big']])}#>,~<'check','ok'>,~<'val',21>}~> when 'true' ->
+ #<54>(8,1,'integer',['unsigned'|['big']])}#,'check':='ok','val':=21}~> when 'true' ->
%% Line 17
'ok'
( <_cor3> when 'true' ->
@@ -51,7 +51,7 @@ module 'map_core_test' ['map_core_test'/0,
%% Line 20
fun (_cor1,_cor0) ->
case <_cor1,_cor0> of
- <M = ~{~<1337,Bin>,~<'check',_cor8>,~<'val',Val>}~,[V|Vs]> when 'true' ->
+ <M = ~{1337:=Bin,'check':=_cor8,'val':=Val}~,[V|Vs]> when 'true' ->
let <_cor3> =
%% Line 21
call 'erlang':'+'
@@ -67,7 +67,7 @@ module 'map_core_test' ['map_core_test'/0,
(Val, V)
in let <_cor5> =
%% Line 21
- ~{~<1337,_cor4>,~<'val',_cor2>|M}~
+ ~{1337:=_cor4,'val':=_cor2|M}~
in %% Line 21
apply 'call'/2
(_cor5, Vs)
diff --git a/lib/compiler/test/core_fold_SUITE.erl b/lib/compiler/test/core_fold_SUITE.erl
index 6a7036d728..512aada203 100644
--- a/lib/compiler/test/core_fold_SUITE.erl
+++ b/lib/compiler/test/core_fold_SUITE.erl
@@ -23,7 +23,8 @@
t_element/1,setelement/1,t_length/1,append/1,t_apply/1,bifs/1,
eq/1,nested_call_in_case/1,guard_try_catch/1,coverage/1,
unused_multiple_values_error/1,unused_multiple_values/1,
- multiple_aliases/1,redundant_boolean_clauses/1,mixed_matching_clauses/1]).
+ multiple_aliases/1,redundant_boolean_clauses/1,
+ mixed_matching_clauses/1,unnecessary_building/1]).
-export([foo/0,foo/1,foo/2,foo/3]).
@@ -36,11 +37,12 @@ all() ->
[{group,p}].
groups() ->
- [{p,test_lib:parallel(),
+ [{p,[parallel],
[t_element,setelement,t_length,append,t_apply,bifs,
eq,nested_call_in_case,guard_try_catch,coverage,
unused_multiple_values_error,unused_multiple_values,
- multiple_aliases,redundant_boolean_clauses,mixed_matching_clauses]}].
+ multiple_aliases,redundant_boolean_clauses,
+ mixed_matching_clauses,unnecessary_building]}].
init_per_suite(Config) ->
@@ -60,6 +62,12 @@ t_element(Config) when is_list(Config) ->
X = make_ref(),
?line X = id(element(1, {X,y,z})),
?line b = id(element(2, {a,b,c,d})),
+ (fun() ->
+ case {a,#{k=>X}} of
+ {a,#{k:=X}}=Tuple ->
+ #{k:=X} = id(element(2, Tuple))
+ end
+ end)(),
%% No optimization, but should work.
Tuple = id({x,y,z}),
@@ -189,7 +197,10 @@ foo(A, B, C) ->
A + B + C.
bifs(Config) when is_list(Config) ->
- ?line <<1,2,3,4>> = id(list_to_binary([1,2,3,4])),
+ <<1,2,3,4>> = id(list_to_binary([1,2,3,4])),
+ K = {a,key},
+ V = {a,value},
+ {ok,#{K:=V}} = id(list_to_tuple([ok,#{K=>V}])),
ok.
-define(CMP_SAME(A0, B), (fun(A) -> true = A == B, false = A /= B end)(id(A0))).
@@ -204,6 +215,16 @@ eq(Config) when is_list(Config) ->
?line ?CMP_DIFF(a, [a]),
?line ?CMP_DIFF(a, {1,2,3}),
+ ?CMP_SAME(#{a=>1.0,b=>2}, #{b=>2.0,a=>1}),
+ ?CMP_SAME(#{a=>[1.0],b=>[2]}, #{b=>[2.0],a=>[1]}),
+
+ %% The rules for comparing keys are different in 17.x and 18.x.
+ %% Just test that the results are consistent.
+ Bool = id(#{1=>a}) == id(#{1.0=>a}), %Unoptimizable.
+ Bool = id(#{1=>a}) == #{1.0=>a}, %Optimizable.
+ Bool = #{1=>a} == #{1.0=>a}, %Optimizable.
+ io:format("Bool = ~p\n", [Bool]),
+
ok.
%% OTP-7117.
@@ -236,6 +257,8 @@ do_guard_try_catch(K, V) ->
false
end.
+-record(cover_opt_guard_try, {list=[]}).
+
coverage(Config) when is_list(Config) ->
?line {'EXIT',{{case_clause,{a,b,c}},_}} =
(catch cover_will_match_list_type({a,b,c})),
@@ -245,6 +268,9 @@ coverage(Config) when is_list(Config) ->
?line error = cover_will_match_lit_list(),
{ok,[a]} = cover_is_safe_bool_expr(a),
+ ok = cover_opt_guard_try(#cover_opt_guard_try{list=[a]}),
+ error = cover_opt_guard_try(#cover_opt_guard_try{list=[]}),
+
%% Make sure that we don't attempt to make literals
%% out of pids. (Putting a pid into a #c_literal{}
%% would crash later compiler passes.)
@@ -257,6 +283,12 @@ coverage(Config) when is_list(Config) ->
error = bsm_an_inlined(<<1,2,3>>, Config),
error = bsm_an_inlined([], Config),
+ %% Cover eval_rel_op/4.
+ Tuple = id({a,b}),
+ false = case Tuple of
+ {_,_} ->
+ Tuple =:= true
+ end,
ok.
cover_will_match_list_type(A) ->
@@ -298,6 +330,14 @@ cover_is_safe_bool_expr(X) ->
false
end.
+cover_opt_guard_try(Msg) ->
+ if
+ length(Msg#cover_opt_guard_try.list) =/= 1 ->
+ error;
+ true ->
+ ok
+ end.
+
bsm_an_inlined(<<_:8>>, _) -> ok;
bsm_an_inlined(_, _) -> error.
@@ -384,4 +424,29 @@ mixed_matching_clauses(Config) when is_list(Config) ->
end,
ok.
+unnecessary_building(Config) when is_list(Config) ->
+ Term1 = do_unnecessary_building_1(test_lib:id(a)),
+ [{a,a},{a,a}] = Term1,
+ 7 = erts_debug:size(Term1),
+
+ %% The Input term should not be rebuilt (thus, it should
+ %% only be counted once in the size of the combined term).
+ Input = test_lib:id({a,b,c}),
+ Term2 = test_lib:id(do_unnecessary_building_2(Input)),
+ {b,[{a,b,c},none],x} = Term2,
+ 4+4+4+2 = erts_debug:size([Term2|Input]),
+
+ ok.
+
+do_unnecessary_building_1(S) ->
+ %% The tuple must only be built once.
+ F0 = F1 = {S,S},
+ [F0,F1].
+
+do_unnecessary_building_2({a,_,_}=T) ->
+ %% The T term should not be rebuilt.
+ {b,
+ [_,_] = [T,none],
+ x}.
+
id(I) -> I.
diff --git a/lib/compiler/test/float_SUITE.erl b/lib/compiler/test/float_SUITE.erl
index afc04fd440..fb8da37f4f 100644
--- a/lib/compiler/test/float_SUITE.erl
+++ b/lib/compiler/test/float_SUITE.erl
@@ -118,6 +118,7 @@ math_functions(Config) when is_list(Config) ->
?line 0.0 = math:sinh(0),
?line 1.0 = math:cosh(0),
?line 0.0 = math:tanh(0),
+ 1.0 = math:log2(2),
?line 1.0 = math:log10(10),
?line -1.0 = math:cos(math:pi()),
?line 1.0 = math:exp(0),
@@ -136,6 +137,7 @@ math_functions(Config) when is_list(Config) ->
?line 0.0 = math:sinh(id(0)),
?line 1.0 = math:cosh(id(0)),
?line 0.0 = math:tanh(id(0)),
+ 1.0 = math:log2(id(2)),
?line 1.0 = math:log10(id(10)),
?line 1.0 = math:exp(id(0)),
?line 0.0 = math:log(id(1)),
diff --git a/lib/compiler/test/guard_SUITE.erl b/lib/compiler/test/guard_SUITE.erl
index eb205d09a7..8db47ffa40 100644
--- a/lib/compiler/test/guard_SUITE.erl
+++ b/lib/compiler/test/guard_SUITE.erl
@@ -30,7 +30,7 @@
old_guard_tests/1,
build_in_guard/1,gbif/1,
t_is_boolean/1,is_function_2/1,
- tricky/1,rel_ops/1,literal_type_tests/1,
+ tricky/1,rel_ops/1,rel_op_combinations/1,literal_type_tests/1,
basic_andalso_orelse/1,traverse_dcd/1,
check_qlc_hrl/1,andalso_semi/1,t_tuple_size/1,binary_part/1,
bad_constants/1,bad_guards/1]).
@@ -42,12 +42,13 @@ all() ->
[{group,p}].
groups() ->
- [{p,test_lib:parallel(),
+ [{p,[parallel],
[misc,const_cond,basic_not,complex_not,nested_nots,
semicolon,complex_semicolon,comma,or_guard,
more_or_guards,complex_or_guards,and_guard,xor_guard,
more_xor_guards,build_in_guard,old_guard_tests,gbif,
- t_is_boolean,is_function_2,tricky,rel_ops,
+ t_is_boolean,is_function_2,tricky,
+ rel_ops,rel_op_combinations,
literal_type_tests,basic_andalso_orelse,traverse_dcd,
check_qlc_hrl,andalso_semi,t_tuple_size,binary_part,
bad_constants,bad_guards]}].
@@ -330,7 +331,15 @@ complex_semicolon(Config) when is_list(Config) ->
?line ok = csemi6({a,b}, 0),
?line ok = csemi6({}, 3),
?line ok = csemi6({a,b,c}, 3),
-
+
+ %% 7
+ error = csemi7(#{a=>1}, 1, 0),
+ error = csemi7(<<>>, 1, 0),
+ ok = csemi7(#{a=>1}, 3, 0),
+ ok = csemi7(#{a=>1}, 0, 3),
+ ok = csemi7(#{a=>1}, 3, 3),
+ ok = csemi7(#{a=>1, b=>3}, 0, 0),
+
ok.
csemi1(Type, Val) when is_list(Val), Type == float;
@@ -442,6 +451,9 @@ csemi5(_, _) -> error.
csemi6(A, B) when hd([tuple_size(A)]) > 1; abs(B) > 2 -> ok;
csemi6(_, _) -> error.
+csemi7(A, B, C) when A#{a:=B} > #{a=>1}; abs(C) > 2 -> ok;
+csemi7(_, _, _) -> error.
+
comma(Config) when is_list(Config) ->
%% ',' combinations of literal true/false.
@@ -1122,6 +1134,231 @@ rel_ops(Config) when is_list(Config) ->
-undef(TestOp).
+rel_op_combinations(Config) when is_list(Config) ->
+ Digits0 = lists:seq(16#0030, 16#0039) ++
+ lists:seq(16#0660, 16#0669) ++
+ lists:seq(16#06F0, 16#06F9),
+ Digits = gb_sets:from_list(Digits0),
+ rel_op_combinations_1(16#0700, Digits),
+
+ BrokenRange0 = lists:seq(3, 5) ++
+ lists:seq(10, 12) ++ lists:seq(14, 20),
+ BrokenRange = gb_sets:from_list(BrokenRange0),
+ rel_op_combinations_2(30, BrokenRange),
+
+ Red0 = [{I,2*I} || I <- lists:seq(0, 50)] ++
+ [{I,5*I} || I <- lists:seq(51, 80)],
+ Red = gb_trees:from_orddict(Red0),
+ rel_op_combinations_3(100, Red).
+
+rel_op_combinations_1(0, _) ->
+ ok;
+rel_op_combinations_1(N, Digits) ->
+ Bool = gb_sets:is_member(N, Digits),
+ Bool = is_digit_1(N),
+ Bool = is_digit_2(N),
+ Bool = is_digit_3(N),
+ Bool = is_digit_4(N),
+ Bool = is_digit_5(N),
+ Bool = is_digit_6(N),
+ Bool = is_digit_7(N),
+ Bool = is_digit_8(N),
+ rel_op_combinations_1(N-1, Digits).
+
+is_digit_1(X) when 16#0660 =< X, X =< 16#0669 -> true;
+is_digit_1(X) when 16#0030 =< X, X =< 16#0039 -> true;
+is_digit_1(X) when 16#06F0 =< X, X =< 16#06F9 -> true;
+is_digit_1(_) -> false.
+
+is_digit_2(X) when (16#0030-1) < X, X =< 16#0039 -> true;
+is_digit_2(X) when (16#0660-1) < X, X =< 16#0669 -> true;
+is_digit_2(X) when (16#06F0-1) < X, X =< 16#06F9 -> true;
+is_digit_2(_) -> false.
+
+is_digit_3(X) when 16#0660 =< X, X < (16#0669+1) -> true;
+is_digit_3(X) when 16#0030 =< X, X < (16#0039+1) -> true;
+is_digit_3(X) when 16#06F0 =< X, X < (16#06F9+1) -> true;
+is_digit_3(_) -> false.
+
+is_digit_4(X) when (16#0660-1) < X, X < (16#0669+1) -> true;
+is_digit_4(X) when (16#0030-1) < X, X < (16#0039+1) -> true;
+is_digit_4(X) when (16#06F0-1) < X, X < (16#06F9+1) -> true;
+is_digit_4(_) -> false.
+
+is_digit_5(X) when X >= 16#0660, X =< 16#0669 -> true;
+is_digit_5(X) when X >= 16#0030, X =< 16#0039 -> true;
+is_digit_5(X) when X >= 16#06F0, X =< 16#06F9 -> true;
+is_digit_5(_) -> false.
+
+is_digit_6(X) when X > (16#0660-1), X =< 16#0669 -> true;
+is_digit_6(X) when X > (16#0030-1), X =< 16#0039 -> true;
+is_digit_6(X) when X > (16#06F0-1), X =< 16#06F9 -> true;
+is_digit_6(_) -> false.
+
+is_digit_7(X) when 16#0660 =< X, X =< 16#0669 -> true;
+is_digit_7(X) when 16#0030 =< X, X =< 16#003A, X =/= 16#003A -> true;
+is_digit_7(X) when 16#06F0 =< X, X =< 16#06F9 -> true;
+is_digit_7(_) -> false.
+
+is_digit_8(X) when X =< 16#0039, X > (16#0030-1) -> true;
+is_digit_8(X) when X =< 16#06F9, X > (16#06F0-1) -> true;
+is_digit_8(X) when X =< 16#0669, X > (16#0660-1) -> true;
+is_digit_8(16#0670) -> false;
+is_digit_8(_) -> false.
+
+rel_op_combinations_2(0, _) ->
+ ok;
+rel_op_combinations_2(N, Range) ->
+ Bool = gb_sets:is_member(N, Range),
+ Bool = broken_range_1(N),
+ Bool = broken_range_2(N),
+ Bool = broken_range_3(N),
+ Bool = broken_range_4(N),
+ Bool = broken_range_5(N),
+ Bool = broken_range_6(N),
+ Bool = broken_range_7(N),
+ Bool = broken_range_8(N),
+ Bool = broken_range_9(N),
+ Bool = broken_range_10(N),
+ Bool = broken_range_11(N),
+ Bool = broken_range_12(N),
+ Bool = broken_range_13(N),
+ rel_op_combinations_2(N-1, Range).
+
+broken_range_1(X) when X >= 10, X =< 20, X =/= 13 -> true;
+broken_range_1(X) when X >= 3, X =< 5 -> true;
+broken_range_1(_) -> false.
+
+broken_range_2(X) when X >= 10, X =< 12 -> true;
+broken_range_2(X) when X >= 14, X =< 20 -> true;
+broken_range_2(X) when X >= 3, X =< 5 -> true;
+broken_range_2(_) -> false.
+
+broken_range_3(X) when X >= 10, X =< 12 -> true;
+broken_range_3(X) when X >= 14, X < 21 -> true;
+broken_range_3(3) -> true;
+broken_range_3(4) -> true;
+broken_range_3(5) -> true;
+broken_range_3(_) -> false.
+
+broken_range_4(X) when X =< 5, X >= 3 -> true;
+broken_range_4(X) when X >= 10, X =< 20, X =/= 13 -> true;
+broken_range_4(X) when X =< 100 -> false;
+broken_range_4(_) -> false.
+
+broken_range_5(X) when X >= 10, X =< 20, X =/= 13 -> true;
+broken_range_5(X) when X > 2, X =< 5 -> true;
+broken_range_5(_) -> false.
+
+broken_range_6(X) when X >= 10, X =< 20, X =/= 13 -> true;
+broken_range_6(X) when X > 2, X < 6 -> true;
+broken_range_6(_) -> false.
+
+broken_range_7(X) when X > 2, X < 6 -> true;
+broken_range_7(X) when X >= 10, X =< 20, X =/= 13 -> true;
+broken_range_7(X) when X > 30 -> false;
+broken_range_7(_) -> false.
+
+broken_range_8(X) when X >= 10, X =< 20, X =/= 13 -> true;
+broken_range_8(X) when X =:= 3 -> true;
+broken_range_8(X) when X >= 3, X =< 5 -> true;
+broken_range_8(_) -> false.
+
+broken_range_9(X) when X >= 10, X =< 20, X =/= 13 -> true;
+broken_range_9(X) when X =:= 13 -> false;
+broken_range_9(X) when X >= 3, X =< 5 -> true;
+broken_range_9(_) -> false.
+
+broken_range_10(X) when X >= 3, X =< 5 -> true;
+broken_range_10(X) when X >= 10, X =< 20, X =/= 13 -> true;
+broken_range_10(X) when X =/= 13 -> false;
+broken_range_10(_) -> false.
+
+broken_range_11(X) when X >= 10, X =< 20, X =/= 13 -> true;
+broken_range_11(X) when is_tuple(X), X =:= 10 -> true;
+broken_range_11(X) when X >= 3, X =< 5 -> true;
+broken_range_11(_) -> false.
+
+broken_range_12(X) when X >= 3, X =< 5 -> true;
+broken_range_12(X) when X >= 10, X =< 20, X =/= 13 -> true;
+broken_range_12(X) when X < 30, X > 20 -> false;
+broken_range_12(_) -> false.
+
+broken_range_13(X) when X >= 10, X =< 20, 13 =/= X -> true;
+broken_range_13(X) when X >= 3, X =< 5 -> true;
+broken_range_13(_) -> false.
+
+rel_op_combinations_3(0, _) ->
+ ok;
+rel_op_combinations_3(N, Red) ->
+ Val = case gb_trees:lookup(N, Red) of
+ none -> none;
+ {value,V} -> V
+ end,
+ Val = redundant_1(N),
+ Val = redundant_2(N),
+ Val = redundant_3(N),
+ Val = redundant_4(N),
+ Val = redundant_5(N),
+ Val = redundant_6(N),
+ Val = redundant_7(N),
+ Val = redundant_8(N),
+ Val = redundant_9(N),
+ Val = redundant_10(N),
+ Val = redundant_11(N),
+ rel_op_combinations_3(N-1, Red).
+
+redundant_1(X) when X >= 51, X =< 80 -> 5*X;
+redundant_1(X) when X < 51 -> 2*X;
+redundant_1(_) -> none.
+
+redundant_2(X) when X < 51 -> 2*X;
+redundant_2(X) when X >= 51, X =< 80 -> 5*X;
+redundant_2(_) -> none.
+
+redundant_3(X) when X < 51 -> 2*X;
+redundant_3(X) when X =< 80, X >= 51 -> 5*X;
+redundant_3(X) when X =/= 100 -> none;
+redundant_3(_) -> none.
+
+redundant_4(X) when X < 51 -> 2*X;
+redundant_4(X) when X =< 80, X > 50 -> 5*X;
+redundant_4(X) when X =/= 100 -> none;
+redundant_4(_) -> none.
+
+redundant_5(X) when X < 51 -> 2*X;
+redundant_5(X) when X > 50, X < 81 -> 5*X;
+redundant_5(X) when X =< 10 -> none;
+redundant_5(_) -> none.
+
+redundant_6(X) when X > 50, X =< 80 -> 5*X;
+redundant_6(X) when X < 51 -> 2*X;
+redundant_6(_) -> none.
+
+redundant_7(X) when is_integer(X), X >= 51, X =< 80 -> 5*X;
+redundant_7(X) when is_integer(X), X < 51 -> 2*X;
+redundant_7(_) -> none.
+
+redundant_8(X) when X >= 51, X =< 80 -> 5*X;
+redundant_8(X) when X < 51 -> 2*X;
+redundant_8(_) -> none.
+
+redundant_9(X) when X >= 51, X =< 80 -> 5*X;
+redundant_9(X) when X < 51 -> 2*X;
+redundant_9(90) -> none;
+redundant_9(X) when X =/= 90 -> none;
+redundant_9(_) -> none.
+
+redundant_10(X) when X >= 51, X =< 80 -> 5*X;
+redundant_10(X) when X < 51 -> 2*X;
+redundant_10(90) -> none;
+redundant_10(X) when X =:= 90 -> none;
+redundant_10(_) -> none.
+
+redundant_11(X) when X < 51 -> 2*X;
+redundant_11(X) when X =:= 10 -> 2*X;
+redundant_11(X) when X >= 51, X =< 80 -> 5*X;
+redundant_11(_) -> none.
%% Test type tests on literal values. (From emulator test suites.)
literal_type_tests(Config) when is_list(Config) ->
@@ -1556,6 +1793,24 @@ bad_constants(Config) when is_list(Config) ->
bad_guards(Config) when is_list(Config) ->
if erlang:float(self()); true -> ok end,
+
+ fc(catch bad_guards_1(1, [])),
+ fc(catch bad_guards_1(1, [2])),
+ fc(catch bad_guards_1(atom, [2])),
+
+ fc(catch bad_guards_2(#{a=>0,b=>0}, [])),
+ fc(catch bad_guards_2(#{a=>0,b=>0}, [x])),
+ fc(catch bad_guards_2(not_a_map, [x])),
+ fc(catch bad_guards_2(42, [x])),
+ ok.
+
+%% beam_bool used to produce GC BIF instructions whose
+%% Live operands included uninitialized registers.
+
+bad_guards_1(X, [_]) when {{X}}, -X ->
+ ok.
+
+bad_guards_2(M, [_]) when M#{a := 0, b => 0}, map_size(M) ->
ok.
%% Call this function to turn off constant propagation.
diff --git a/lib/compiler/test/lc_SUITE.erl b/lib/compiler/test/lc_SUITE.erl
index 398398a397..62bada1407 100644
--- a/lib/compiler/test/lc_SUITE.erl
+++ b/lib/compiler/test/lc_SUITE.erl
@@ -18,12 +18,12 @@
%%
-module(lc_SUITE).
--author('[email protected]').
-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
init_per_group/2,end_per_group/2,
init_per_testcase/2,end_per_testcase/2,
basic/1,deeply_nested/1,no_generator/1,
- empty_generator/1,no_export/1]).
+ empty_generator/1,no_export/1,shadow/1,
+ effect/1]).
-include_lib("test_server/include/test_server.hrl").
@@ -31,10 +31,18 @@ suite() -> [{ct_hooks,[ts_install_cth]}].
all() ->
test_lib:recompile(?MODULE),
- [basic, deeply_nested, no_generator, empty_generator, no_export].
+ [{group,p}].
groups() ->
- [].
+ [{p,test_lib:parallel(),
+ [basic,
+ deeply_nested,
+ no_generator,
+ empty_generator,
+ no_export,
+ shadow,
+ effect
+ ]}].
init_per_suite(Config) ->
Config.
@@ -59,34 +67,34 @@ end_per_testcase(Case, Config) when is_atom(Case), is_list(Config) ->
ok.
basic(Config) when is_list(Config) ->
- ?line L0 = lists:seq(1, 10),
- ?line L1 = my_map(fun(X) -> {x,X} end, L0),
- ?line L1 = [{x,X} || X <- L0],
- ?line L0 = my_map(fun({x,X}) -> X end, L1),
- ?line [1,2,3,4,5] = [X || X <- L0, X < 6],
- ?line [4,5,6] = [X || X <- L0, X > 3, X < 7],
- ?line [] = [X || X <- L0, X > 32, X < 7],
- ?line [1,3,5,7,9] = [X || X <- L0, odd(X)],
- ?line [2,4,6,8,10] = [X || X <- L0, not odd(X)],
- ?line [1,3,5,9] = [X || X <- L0, odd(X), X =/= 7],
- ?line [2,4,8,10] = [X || X <- L0, not odd(X), X =/= 6],
+ L0 = lists:seq(1, 10),
+ L1 = my_map(fun(X) -> {x,X} end, L0),
+ L1 = [{x,X} || X <- L0],
+ L0 = my_map(fun({x,X}) -> X end, L1),
+ [1,2,3,4,5] = [X || X <- L0, X < 6],
+ [4,5,6] = [X || X <- L0, X > 3, X < 7],
+ [] = [X || X <- L0, X > 32, X < 7],
+ [1,3,5,7,9] = [X || X <- L0, odd(X)],
+ [2,4,6,8,10] = [X || X <- L0, not odd(X)],
+ [1,3,5,9] = [X || X <- L0, odd(X), X =/= 7],
+ [2,4,8,10] = [X || X <- L0, not odd(X), X =/= 6],
%% Append is specially handled.
- ?line [1,3,5,9,2,4,8,10] = [X || X <- L0, odd(X), X =/= 7] ++
+ [1,3,5,9,2,4,8,10] = [X || X <- L0, odd(X), X =/= 7] ++
[X || X <- L0, not odd(X), X =/= 6],
%% Guards BIFs are evaluated in guard context. Weird, but true.
- ?line [{a,b,true},{x,y,true,true}] = [X || X <- tuple_list(), element(3, X)],
+ [{a,b,true},{x,y,true,true}] = [X || X <- tuple_list(), element(3, X)],
%% Filter expressions with andalso/orelse.
- ?line "abc123" = alphanum("?abc123.;"),
+ "abc123" = alphanum("?abc123.;"),
%% Error cases.
- ?line [] = [{xx,X} || X <- L0, element(2, X) == no_no_no],
- ?line {'EXIT',_} = (catch [X || X <- L1, list_to_atom(X) == dum]),
- ?line [] = [X || X <- L1, X+1 < 2],
- ?line {'EXIT',_} = (catch [X || X <- L1, odd(X)]),
- ?line fc([x], catch [E || E <- id(x)]),
+ [] = [{xx,X} || X <- L0, element(2, X) == no_no_no],
+ {'EXIT',_} = (catch [X || X <- L1, list_to_atom(X) == dum]),
+ [] = [X || X <- L1, X+1 < 2],
+ {'EXIT',_} = (catch [X || X <- L1, odd(X)]),
+ fc([x], catch [E || E <- id(x)]),
ok.
tuple_list() ->
@@ -116,12 +124,12 @@ deeply_nested_1() ->
X16 <- [4],X17 <- [3],X18 <- [fun() -> X16+X17 end],X19 <- [2],X20 <- [1]].
no_generator(Config) when is_list(Config) ->
- ?line Seq = lists:seq(-10, 17),
- ?line [no_gen_verify(no_gen(A, B), A, B) || A <- Seq, B <- Seq],
+ Seq = lists:seq(-10, 17),
+ [no_gen_verify(no_gen(A, B), A, B) || A <- Seq, B <- Seq],
%% Literal expression, for coverage.
- ?line [a] = [a || true],
- ?line [a,b,c] = [a || true] ++ [b,c],
+ [a] = [a || true],
+ [a,b,c] = [a || true] ++ [b,c],
ok.
no_gen(A, B) ->
@@ -174,13 +182,51 @@ no_gen_eval(Fun, Res) ->
no_gen_one_more(A, B) -> A + 1 =:= B.
empty_generator(Config) when is_list(Config) ->
- ?line [] = [X || {X} <- [], (false or (X/0 > 3))],
+ [] = [X || {X} <- [], (false or (X/0 > 3))],
ok.
no_export(Config) when is_list(Config) ->
[] = [ _X = a || false ] ++ [ _X = a || false ],
ok.
+%% Test that variables in list comprehensions are
+%% correctly shadowed.
+
+shadow(Config) when is_list(Config) ->
+ Shadowed = nomatch,
+ _ = id(Shadowed), %Eliminate warning.
+ L = [{Shadowed,Shadowed+1} || Shadowed <- lists:seq(7, 9)],
+ [{7,8},{8,9},{9,10}] = id(L),
+ [8,9] = id([Shadowed || {_,Shadowed} <- id(L),
+ Shadowed < 10]),
+ ok.
+
+effect(Config) when is_list(Config) ->
+ [{42,{a,b,c}}] =
+ do_effect(fun(F, L) ->
+ [F({V1,V2}) ||
+ #{<<1:500>>:=V1,<<2:301>>:=V2} <- L],
+ ok
+ end, id([#{},x,#{<<1:500>>=>42,<<2:301>>=>{a,b,c}}])),
+
+ %% Will trigger the time-trap timeout if not tail-recursive.
+ case ?MODULE of
+ lc_SUITE ->
+ _ = [{'EXIT',{badarg,_}} =
+ (catch binary_to_atom(<<C/utf8>>, utf8)) ||
+ C <- lists:seq(16#10000, 16#FFFFF)];
+ _ ->
+ ok
+ end,
+
+ ok.
+
+do_effect(Lc, L) ->
+ put(?MODULE, []),
+ F = fun(V) -> put(?MODULE, [V|get(?MODULE)]) end,
+ ok = Lc(F, L),
+ lists:reverse(erase(?MODULE)).
+
id(I) -> I.
fc(Args, {'EXIT',{function_clause,[{?MODULE,_,Args,_}|_]}}) -> ok;
diff --git a/lib/compiler/test/map_SUITE.erl b/lib/compiler/test/map_SUITE.erl
index 75efce9d7b..cfa8262701 100644
--- a/lib/compiler/test/map_SUITE.erl
+++ b/lib/compiler/test/map_SUITE.erl
@@ -61,7 +61,9 @@
suite() -> [].
-all() -> [
+all() ->
+ test_lib:recompile(?MODULE),
+ [
%% literals
t_build_and_match_literals,
t_update_literals, t_match_and_update_literals,
@@ -317,6 +319,12 @@ t_update_exact(Config) when is_list(Config) ->
{'EXIT',{badarg,_}} = (catch M0#{42=>v1,42.0:=v2,42:=v3}),
{'EXIT',{badarg,_}} = (catch <<>>#{nonexisting:=val}),
{'EXIT',{badarg,_}} = (catch M0#{<<0:257>> := val}), %% limitation
+
+ %% A workaround for a bug allowed an empty map to be updated.
+ {'EXIT',{badarg,_}} = (catch (id(#{}))#{a:=1}),
+ {'EXIT',{badarg,_}} = (catch #{}#{a:=1}),
+ Empty = #{},
+ {'EXIT',{badarg,_}} = (catch Empty#{a:=1}),
ok.
t_update_values(Config) when is_list(Config) ->
@@ -633,6 +641,7 @@ t_build_and_match_nil(Config) when is_list(Config) ->
"treat" => V2,
[] => V1 }),
#{ [] := V3, [] := V3 } = id(#{ [] => V1, [] => V3 }),
+ #{ <<1>> := V3, [] := V1 } = id(#{ [] => V1, <<1>> => V3 }),
ok.
t_build_and_match_structure(Config) when is_list(Config) ->
diff --git a/lib/compiler/test/match_SUITE.erl b/lib/compiler/test/match_SUITE.erl
index ae7d764535..7522ee985f 100644
--- a/lib/compiler/test/match_SUITE.erl
+++ b/lib/compiler/test/match_SUITE.erl
@@ -22,7 +22,8 @@
init_per_group/2,end_per_group/2,
pmatch/1,mixed/1,aliases/1,match_in_call/1,
untuplify/1,shortcut_boolean/1,letify_guard/1,
- selectify/1,underscore/1,coverage/1]).
+ selectify/1,underscore/1,match_map/1,map_vars_used/1,
+ coverage/1]).
-include_lib("test_server/include/test_server.hrl").
@@ -33,9 +34,10 @@ all() ->
[{group,p}].
groups() ->
- [{p,test_lib:parallel(),
+ [{p,[parallel],
[pmatch,mixed,aliases,match_in_call,untuplify,
- shortcut_boolean,letify_guard,selectify,underscore,coverage]}].
+ shortcut_boolean,letify_guard,selectify,
+ underscore,match_map,map_vars_used,coverage]}].
init_per_suite(Config) ->
@@ -400,6 +402,36 @@ underscore(Config) when is_list(Config) ->
_ = is_list(Config),
ok.
+-record(s, {map,t}).
+
+match_map(Config) when is_list(Config) ->
+ Map = #{key=>{x,y},ignore=>anything},
+ #s{map=Map,t={x,y}} = do_match_map(#s{map=Map}),
+ {a,#{k:={a,b,c}}} = do_match_map_2(#{k=>{a,b,c}}),
+ ok.
+
+do_match_map(#s{map=#{key:=Val}}=S) ->
+ %% Would crash with a 'badarg' exception.
+ S#s{t=Val}.
+
+do_match_map_2(Map) ->
+ case {a,Map} of
+ {a,#{k:=_}}=Tuple ->
+ Tuple
+ end.
+
+map_vars_used(Config) when is_list(Config) ->
+ {some,value} = do_map_vars_used(a, b, #{{a,b}=>42,v=>{some,value}}),
+ ok.
+
+do_map_vars_used(X, Y, Map) ->
+ case {X,Y} of
+ T ->
+ %% core_lib:is_var_used/2 would not consider T used.
+ #{T:=42,v:=Val} = Map,
+ Val
+ end.
+
coverage(Config) when is_list(Config) ->
%% Cover beam_dead.
ok = coverage_1(x, a),
diff --git a/lib/compiler/test/misc_SUITE.erl b/lib/compiler/test/misc_SUITE.erl
index 44c7161530..68a31f14d5 100644
--- a/lib/compiler/test/misc_SUITE.erl
+++ b/lib/compiler/test/misc_SUITE.erl
@@ -60,7 +60,7 @@ all() ->
[{group,p}].
groups() ->
- [{p,[],%%test_lib:parallel(),
+ [{p,[],
[tobias,empty_string,md5,silly_coverage,
confused_literals,integer_encoding,override_bif]}].
@@ -225,14 +225,15 @@ silly_coverage(Config) when is_list(Config) ->
{label,2}|non_proper_list]}],99},
?line expect_error(fun() -> beam_bool:module(BoolInput, []) end),
- %% beam_dead
+ %% beam_dead. This is tricky. Our function must look OK to
+ %% beam_utils:clean_labels/1, but must crash beam_dead.
DeadInput = {?MODULE,[{foo,0}],[],
[{function,foo,0,2,
[{label,1},
{func_info,{atom,?MODULE},{atom,foo},0},
{label,2},
- {jump,bad}]}],99},
- ?line expect_error(fun() -> beam_block:module(DeadInput, []) end),
+ {test,is_eq_exact,{f,1},[bad,operands]}]}],99},
+ expect_error(fun() -> beam_dead:module(DeadInput, []) end),
%% beam_clean
CleanInput = {?MODULE,[{foo,0}],[],
@@ -279,6 +280,14 @@ silly_coverage(Config) when is_list(Config) ->
{label,2}|non_proper_list]}],99},
expect_error(fun() -> beam_z:module(BeamZInput, []) end),
+ %% beam_validator.
+ BeamValInput = {?MODULE,[{foo,0}],[],
+ [{function,foo,0,2,
+ [{label,1},
+ {func_info,{atom,?MODULE},{atom,foo},0},
+ {label,2}|non_proper_list]}],99},
+ expect_error(fun() -> beam_validator:module(BeamValInput, []) end),
+
ok.
expect_error(Fun) ->
diff --git a/lib/compiler/test/receive_SUITE.erl b/lib/compiler/test/receive_SUITE.erl
index 00a6e900d4..fb82bf6101 100644
--- a/lib/compiler/test/receive_SUITE.erl
+++ b/lib/compiler/test/receive_SUITE.erl
@@ -187,12 +187,13 @@ ref_opt(Config) when is_list(Config) ->
end.
ref_opt_1(Config) ->
- ?line DataDir = ?config(data_dir, Config),
- ?line PrivDir = ?config(priv_dir, Config),
+ DataDir = ?config(data_dir, Config),
+ PrivDir = ?config(priv_dir, Config),
Sources = filelib:wildcard(filename:join([DataDir,"ref_opt","*.{erl,S}"])),
- ?line test_lib:p_run(fun(Src) ->
- do_ref_opt(Src, PrivDir)
- end, Sources),
+ test_lib:p_run(fun(Src) ->
+ do_ref_opt(Src, PrivDir)
+ end, Sources),
+ cover_recv_instructions(),
ok.
do_ref_opt(Source, PrivDir) ->
@@ -202,9 +203,9 @@ do_ref_opt(Source, PrivDir) ->
{outdir,PrivDir}] ++
[from_asm || Ext =:= ".S" ]),
Base = filename:rootname(filename:basename(Source), Ext),
- code:purge(list_to_atom(Base)),
- BeamFile = filename:join(PrivDir, Base),
- code:load_abs(BeamFile),
+ code:purge(list_to_atom(Base)),
+ BeamFile = filename:join(PrivDir, Base),
+ code:load_abs(BeamFile),
ok = Mod:Mod(),
{beam_file,Mod,_,_,_,Code} = beam_disasm:file(BeamFile),
case Base of
@@ -232,6 +233,27 @@ collect_recv_opt_instrs(Code) ->
end] || {function,_,_,_,Is} <- Code],
lists:append(L).
+cover_recv_instructions() ->
+ %% We want to cover the handling of recv_mark and recv_set in beam_utils.
+ %% Since those instructions are introduced in a late optimization pass,
+ %% beam_utils:live_opt() will not see them unless the compilation is
+ %% started from a .S file. The compile_SUITE:asm/1 test case will
+ %% compile all test suite files to .S and then run them through the
+ %% compiler again.
+ %%
+ %% Here we will ensure that this module contains recv_mark
+ %% and recv_set instructions.
+ Pid = spawn_link(fun() ->
+ receive {Parent,Ref} ->
+ Parent ! Ref
+ end
+ end),
+ Ref = make_ref(),
+ Pid ! {self(),Ref},
+ receive
+ Ref -> ok
+ end.
+
export(Config) when is_list(Config) ->
Ref = make_ref(),
?line self() ! {result,Ref,42},
diff --git a/lib/compiler/test/record_SUITE.erl b/lib/compiler/test/record_SUITE.erl
index f736e14bf6..8cc90026ec 100644
--- a/lib/compiler/test/record_SUITE.erl
+++ b/lib/compiler/test/record_SUITE.erl
@@ -246,6 +246,14 @@ record_test_2(Config) when is_list(Config) ->
?line Barf = update_barf(Barf0),
?line #barf{a="abc",b=1} = id(Barf),
+ %% Test optimization of is_record/3.
+ false = case id({a,b}) of
+ {_,_}=Tuple -> is_record(Tuple, foo)
+ end,
+ false = case id(true) of
+ true=Bool -> is_record(Bool, foo)
+ end,
+
ok.
record_test_3(Config) when is_list(Config) ->
diff --git a/lib/compiler/test/test_lib.erl b/lib/compiler/test/test_lib.erl
index a8befbecd9..a5e2855f8c 100644
--- a/lib/compiler/test/test_lib.erl
+++ b/lib/compiler/test/test_lib.erl
@@ -18,11 +18,13 @@
%%
-module(test_lib).
--include("test_server.hrl").
+-include_lib("test_server/include/test_server.hrl").
-compile({no_auto_import,[binary_part/2]}).
--export([recompile/1,parallel/0,uniq/0,opt_opts/1,get_data_dir/1,
+-export([id/1,recompile/1,parallel/0,uniq/0,opt_opts/1,get_data_dir/1,
smoke_disasm/1,p_run/2,binary_part/2]).
+id(I) -> I.
+
recompile(Mod) when is_atom(Mod) ->
case whereis(cover_server) of
undefined -> ok;
@@ -44,6 +46,10 @@ smoke_disasm(File) when is_list(File) ->
Res = beam_disasm:file(File),
{beam_file,_Mod} = {element(1, Res),element(2, Res)}.
+%% If we are running cover, we don't want to run test cases that
+%% invoke the compiler in parallel, as doing so would probably
+%% be slower than running them sequentially.
+
parallel() ->
case ?t:is_cover() orelse erlang:system_info(schedulers) =:= 1 of
true -> [];
@@ -90,13 +96,18 @@ get_data_dir(Config) ->
%% Will fail the test case if there were any errors.
p_run(Test, List) ->
+ S = erlang:system_info(schedulers),
N = case ?t:is_cover() of
false ->
- erlang:system_info(schedulers);
+ S + 1;
true ->
- %% Cover is running. Using more than one process
- %% will probably only slow down compilation.
- 1
+ %% Cover is running. Using too many processes
+ %% could slow us down. Measurements on my computer
+ %% showed that using 4 parallel processes was
+ %% slightly faster than using 3. Using more than
+ %% 4 would not buy us much and could actually be
+ %% slower.
+ max(S, 4)
end,
p_run_loop(Test, List, N, [], 0, 0).
diff --git a/lib/compiler/test/trycatch_SUITE.erl b/lib/compiler/test/trycatch_SUITE.erl
index 4530d08c77..80d93fbfa4 100644
--- a/lib/compiler/test/trycatch_SUITE.erl
+++ b/lib/compiler/test/trycatch_SUITE.erl
@@ -24,7 +24,8 @@
catch_oops/1,after_oops/1,eclectic/1,rethrow/1,
nested_of/1,nested_catch/1,nested_after/1,
nested_horrid/1,last_call_optimization/1,bool/1,
- plain_catch_coverage/1,andalso_orelse/1,get_in_try/1]).
+ plain_catch_coverage/1,andalso_orelse/1,get_in_try/1,
+ hockey/1]).
-include_lib("test_server/include/test_server.hrl").
@@ -35,11 +36,12 @@ all() ->
[{group,p}].
groups() ->
- [{p,test_lib:parallel(),
+ [{p,[parallel],
[basic,lean_throw,try_of,try_after,catch_oops,
after_oops,eclectic,rethrow,nested_of,nested_catch,
nested_after,nested_horrid,last_call_optimization,
- bool,plain_catch_coverage,andalso_orelse,get_in_try]}].
+ bool,plain_catch_coverage,andalso_orelse,get_in_try,
+ hockey]}].
init_per_suite(Config) ->
@@ -790,7 +792,6 @@ nested_after_1({X1,C1,V1},
nested_horrid(Config) when is_list(Config) ->
- _V = {make_ref(),nested_horrid,4.711},
{[true,true],{[true,1.0],1.0}} =
nested_horrid_1({true,void,void}, 1.0),
ok.
@@ -944,3 +945,14 @@ get_valid_line([_|T]=Path, Annotations) ->
_:not_found ->
get_valid_line(T, Annotations)
end.
+
+hockey(_) ->
+ {'EXIT',{{badmatch,_},[_|_]}} = (catch hockey()),
+ ok.
+
+hockey() ->
+ %% beam_jump used to generate a call into the try block.
+ %% beam_validator disapproved.
+ receive _ -> (b = fun() -> ok end)
+ + hockey, +x after 0 -> ok end, try (a = fun() -> ok end) + hockey, +
+ y catch _ -> ok end.
diff --git a/lib/compiler/test/warnings_SUITE.erl b/lib/compiler/test/warnings_SUITE.erl
index be0348a92d..6663985ad7 100644
--- a/lib/compiler/test/warnings_SUITE.erl
+++ b/lib/compiler/test/warnings_SUITE.erl
@@ -39,7 +39,7 @@
guard/1,bad_arith/1,bool_cases/1,bad_apply/1,
files/1,effect/1,bin_opt_info/1,bin_construction/1,
comprehensions/1,maps/1,redundant_boolean_clauses/1,
- latin1_fallback/1]).
+ latin1_fallback/1,underscore/1]).
% Default timetrap timeout (set in init_per_testcase).
-define(default_timeout, ?t:minutes(2)).
@@ -64,7 +64,8 @@ groups() ->
[pattern,pattern2,pattern3,pattern4,guard,
bad_arith,bool_cases,bad_apply,files,effect,
bin_opt_info,bin_construction,comprehensions,maps,
- redundant_boolean_clauses,latin1_fallback]}].
+ redundant_boolean_clauses,latin1_fallback,
+ underscore]}].
init_per_suite(Config) ->
Config.
@@ -280,11 +281,12 @@ bad_arith(Config) when is_list(Config) ->
{3,sys_core_fold,{eval_failure,badarith}},
{9,sys_core_fold,nomatch_guard},
{9,sys_core_fold,{eval_failure,badarith}},
+ {9,sys_core_fold,{no_effect,{erlang,is_integer,1}}},
{10,sys_core_fold,nomatch_guard},
{10,sys_core_fold,{eval_failure,badarith}},
{15,sys_core_fold,{eval_failure,badarith}}
] }}],
- ?line [] = run(Config, Ts),
+ [] = run(Config, Ts),
ok.
bool_cases(Config) when is_list(Config) ->
@@ -678,6 +680,45 @@ latin1_fallback(Conf) when is_list(Conf) ->
ok.
+underscore(Config) when is_list(Config) ->
+ S0 = <<"f(A) ->
+ _VAR1 = <<A>>,
+ _VAR2 = {ok,A},
+ _VAR3 = [A],
+ ok.
+ g(A) ->
+ _VAR1 = A/0,
+ _VAR2 = date(),
+ ok.
+ h() ->
+ _VAR1 = fun() -> ok end,
+ ok.
+ i(A) ->
+ _VAR1 = #{A=>42},
+ ok.
+ ">>,
+ Ts0 = [{underscore0,
+ S0,
+ [],
+ {warnings,[{2,sys_core_fold,useless_building},
+ {3,sys_core_fold,useless_building},
+ {4,sys_core_fold,useless_building},
+ {7,sys_core_fold,result_ignored},
+ {8,sys_core_fold,{no_effect,{erlang,date,0}}},
+ {11,sys_core_fold,useless_building},
+ {14,sys_core_fold,useless_building}
+ ]}}],
+ [] = run(Config, Ts0),
+
+ %% Replace all "_VAR<digit>" variables with a plain underscore.
+ %% Now there should be no warnings.
+ S1 = re:replace(S0, "_VAR\\d+", "_", [global]),
+ io:format("~s\n", [S1]),
+ Ts1 = [{underscore1,S1,[],[]}],
+ [] = run(Config, Ts1),
+
+ ok.
+
%%%
%%% End of test cases.
%%%
@@ -699,10 +740,10 @@ run(Config, Tests) ->
%% Compiles a test module and returns the list of errors and warnings.
run_test(Conf, Test0, Warnings) ->
- Mod = "warnings_"++test_lib:uniq(),
- Filename = Mod ++ ".erl",
+ Module = "warnings_"++test_lib:uniq(),
+ Filename = Module ++ ".erl",
?line DataDir = ?privdir,
- Test = ["-module(", Mod, "). ", Test0],
+ Test = ["-module(", Module, "). ", Test0],
?line File = filename:join(DataDir, Filename),
?line Opts = [binary,export_all,return|Warnings],
?line ok = file:write_file(File, Test),
diff --git a/lib/crypto/c_src/crypto.c b/lib/crypto/c_src/crypto.c
index 750f3db7ef..ece29b28e0 100644
--- a/lib/crypto/c_src/crypto.c
+++ b/lib/crypto/c_src/crypto.c
@@ -242,6 +242,7 @@ static ERL_NIF_TERM aes_cfb_8_crypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM
static ERL_NIF_TERM aes_cfb_128_crypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
static ERL_NIF_TERM aes_ctr_encrypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
static ERL_NIF_TERM aes_ctr_stream_encrypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
+static ERL_NIF_TERM aes_ecb_crypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
static ERL_NIF_TERM rand_bytes_1(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
static ERL_NIF_TERM strong_rand_bytes_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
static ERL_NIF_TERM rand_bytes_3(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
@@ -379,6 +380,7 @@ static ErlNifFunc nif_funcs[] = {
{"aes_ctr_decrypt", 3, aes_ctr_encrypt},
{"aes_ctr_stream_encrypt", 2, aes_ctr_stream_encrypt},
{"aes_ctr_stream_decrypt", 2, aes_ctr_stream_encrypt},
+ {"aes_ecb_crypt", 3, aes_ecb_crypt},
{"rand_bytes", 1, rand_bytes_1},
{"strong_rand_bytes_nif", 1, strong_rand_bytes_nif},
{"rand_bytes", 3, rand_bytes_3},
@@ -410,7 +412,7 @@ static ErlNifFunc nif_funcs[] = {
{"bf_ecb_crypt", 3, bf_ecb_crypt},
{"blowfish_ofb64_encrypt", 3, blowfish_ofb64_encrypt},
- {"ec_key_generate", 1, ec_key_generate},
+ {"ec_key_generate", 2, ec_key_generate},
{"ecdsa_sign_nif", 4, ecdsa_sign_nif},
{"ecdsa_verify_nif", 5, ecdsa_verify_nif},
{"ecdh_compute_key_nif", 3, ecdh_compute_key_nif},
@@ -2032,6 +2034,38 @@ static ERL_NIF_TERM chacha20_poly1305_decrypt(ErlNifEnv* env, int argc, const ER
#endif
}
+static ERL_NIF_TERM aes_ecb_crypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
+{/* (Key, Data, IsEncrypt) */
+ ErlNifBinary key_bin, data_bin;
+ AES_KEY aes_key;
+ int i;
+ unsigned char* ret_ptr;
+ ERL_NIF_TERM ret;
+
+ CHECK_OSE_CRYPTO();
+
+ if (!enif_inspect_iolist_as_binary(env, argv[0], &key_bin)
+ || (key_bin.size != 16 && key_bin.size != 32)
+ || !enif_inspect_iolist_as_binary(env, argv[1], &data_bin)
+ || data_bin.size % 16 != 0) {
+ return enif_make_badarg(env);
+ }
+
+ if (argv[2] == atom_true) {
+ i = AES_ENCRYPT;
+ AES_set_encrypt_key(key_bin.data, key_bin.size*8, &aes_key);
+ }
+ else {
+ i = AES_DECRYPT;
+ AES_set_decrypt_key(key_bin.data, key_bin.size*8, &aes_key);
+ }
+
+ ret_ptr = enif_make_new_binary(env, data_bin.size, &ret);
+ AES_ecb_encrypt(data_bin.data, ret_ptr, &aes_key, i);
+ CONSUME_REDS(env,data_bin);
+ return ret;
+}
+
static ERL_NIF_TERM rand_bytes_1(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
{/* (Bytes) */
unsigned bytes;
@@ -3714,32 +3748,37 @@ out:
static ERL_NIF_TERM ec_key_generate(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
{
#if defined(HAVE_EC)
- EC_KEY *key = ec_key_new(env, argv[0]);
+ EC_KEY *key;
+ const EC_GROUP *group;
+ const EC_POINT *public_key;
+ ERL_NIF_TERM priv_key;
+ ERL_NIF_TERM pub_key = atom_undefined;
CHECK_OSE_CRYPTO();
- if (key && EC_KEY_generate_key(key)) {
- const EC_GROUP *group;
- const EC_POINT *public_key;
- ERL_NIF_TERM priv_key;
- ERL_NIF_TERM pub_key = atom_undefined;
-
- group = EC_KEY_get0_group(key);
- public_key = EC_KEY_get0_public_key(key);
+ if (!get_ec_key(env, argv[0], argv[1], atom_undefined, &key))
+ goto badarg;
- if (group && public_key) {
- pub_key = point2term(env, group, public_key,
- EC_KEY_get_conv_form(key));
- }
- priv_key = bn2term(env, EC_KEY_get0_private_key(key));
- EC_KEY_free(key);
- return enif_make_tuple2(env, pub_key, priv_key);
+ if (argv[1] == atom_undefined) {
+ if (!EC_KEY_generate_key(key))
+ goto badarg;
}
- else {
- if (key)
- EC_KEY_free(key);
- return enif_make_badarg(env);
+
+ group = EC_KEY_get0_group(key);
+ public_key = EC_KEY_get0_public_key(key);
+
+ if (group && public_key) {
+ pub_key = point2term(env, group, public_key,
+ EC_KEY_get_conv_form(key));
}
+ priv_key = bn2term(env, EC_KEY_get0_private_key(key));
+ EC_KEY_free(key);
+ return enif_make_tuple2(env, pub_key, priv_key);
+
+badarg:
+ if (key)
+ EC_KEY_free(key);
+ return enif_make_badarg(env);
#else
return atom_notsup;
#endif
diff --git a/lib/crypto/doc/src/crypto.xml b/lib/crypto/doc/src/crypto.xml
index 271130a9e6..4a8ba5c1bf 100644
--- a/lib/crypto/doc/src/crypto.xml
+++ b/lib/crypto/doc/src/crypto.xml
@@ -170,6 +170,36 @@
<funcs>
<func>
+ <name>block_encrypt(Type, Key, PlainText) -> CipherText</name>
+ <fsummary>Encrypt <c>PlainText</c> according to <c>Type</c> block cipher</fsummary>
+ <type>
+ <v>Type = des_ecb | blowfish_ecb | aes_ecb </v>
+ <v>Key = block_key() </v>
+ <v>PlainText = iodata() </v>
+ </type>
+ <desc>
+ <p>Encrypt <c>PlainText</c> according to <c>Type</c> block cipher.</p>
+ <p>May throw exception <c>notsup</c> in case the chosen <c>Type</c>
+ is not supported by the underlying OpenSSL implementation.</p>
+ </desc>
+ </func>
+
+ <func>
+ <name>block_decrypt(Type, Key, CipherText) -> PlainText</name>
+ <fsummary>Decrypt <c>CipherText</c> according to <c>Type</c> block cipher</fsummary>
+ <type>
+ <v>Type = des_ecb | blowfish_ecb | aes_ecb </v>
+ <v>Key = block_key() </v>
+ <v>CipherText = iodata() </v>
+ </type>
+ <desc>
+ <p>Decrypt <c>CipherText</c> according to <c>Type</c> block cipher.</p>
+ <p>May throw exception <c>notsup</c> in case the chosen <c>Type</c>
+ is not supported by the underlying OpenSSL implementation.</p>
+ </desc>
+ </func>
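A minimal round-trip sketch of the new three-argument calls (not part of the patch itself; the key and data sizes follow the NIF's checks of a 16- or 32-byte key and block-aligned data, and the literals are borrowed from the aes_ecb test vectors added later in this diff):

    Key    = <<"YELLOW SUBMARINE">>,                     %% 16-byte AES-128 key
    Plain  = <<"0123456789ABCDEF">>,                     %% length must be a multiple of 16
    Cipher = crypto:block_encrypt(aes_ecb, Key, Plain),
    Plain  = crypto:block_decrypt(aes_ecb, Key, Cipher), %% decrypting restores the input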
+
+ <func>
<name>block_encrypt(Type, Key, Ivec, PlainText) -> CipherText</name>
<name>block_encrypt(AeadType, Key, Ivec, {AAD, PlainText}) -> {CipherText, CipherTag}</name>
<fsummary>Encrypt <c>PlainText</c> according to <c>Type</c> block cipher</fsummary>
@@ -181,7 +211,7 @@
<v>AAD = IVec = CipherText = CipherTag = binary()</v>
</type>
<desc>
- <p>Encrypt <c>PlainText</c>according to <c>Type</c> block cipher.
+ <p>Encrypt <c>PlainText</c> according to <c>Type</c> block cipher.
<c>IVec</c> is an arbitrary initializing vector.</p>
<p>In AEAD (Authenticated Encryption with Associated Data) mode, encrypt
<c>PlainText</c>according to <c>Type</c> block cipher and calculate
@@ -203,7 +233,7 @@
<v>AAD = IVec = CipherText = CipherTag = binary()</v>
</type>
<desc>
- <p>Decrypt <c>CipherText</c>according to <c>Type</c> block cipher.
+ <p>Decrypt <c>CipherText</c> according to <c>Type</c> block cipher.
<c>IVec</c> is an arbitrary initializing vector.</p>
<p>In AEAD (Authenticated Encryption with Associated Data) mode, decrypt
<c>CipherText</c>according to <c>Type</c> block cipher and check the authenticity
@@ -269,7 +299,7 @@
<v>SrpUserParams = {user, [Generator::binary(), Prime::binary(), Version::atom()]}</v>
<v>SrpHostParams = {host, [Verifier::binary(), Generator::binary(), Prime::binary(), Version::atom()]}</v>
<v>PublicKey = dh_public() | ecdh_public() | srp_public() </v>
- <v>PrivKeyIn = undefined | dh_private() | srp_private() </v>
+ <v>PrivKeyIn = undefined | dh_private() | ecdh_private() | srp_private() </v>
<v>PrivKeyOut = dh_private() | ecdh_private() | srp_private() </v>
</type>
<desc>
diff --git a/lib/crypto/src/crypto.erl b/lib/crypto/src/crypto.erl
index 7f82fa83fd..aaae9c027d 100644
--- a/lib/crypto/src/crypto.erl
+++ b/lib/crypto/src/crypto.erl
@@ -211,7 +211,7 @@ supports()->
[{hashs, Hashs},
{ciphers, [des_cbc, des_cfb, des3_cbc, des_ede3, blowfish_cbc,
blowfish_cfb64, blowfish_ofb64, blowfish_ecb, aes_cbc128, aes_cfb8, aes_cfb128,
- aes_cbc256, rc2_cbc, aes_ctr, rc4] ++ Ciphers},
+ aes_cbc256, rc2_cbc, aes_ctr, rc4, aes_ecb] ++ Ciphers},
{public_keys, [rsa, dss, dh, srp] ++ PubKeys}
].
@@ -368,19 +368,24 @@ block_decrypt(chacha20_poly1305, Key, Ivec, {AAD, Data, Tag}) ->
end;
block_decrypt(rc2_cbc, Key, Ivec, Data) ->
rc2_cbc_decrypt(Key, Ivec, Data).
--spec block_encrypt(des_ecb | blowfish_ecb, Key::iodata(), Data::iodata()) -> binary().
+
+-spec block_encrypt(des_ecb | blowfish_ecb | aes_ecb, Key::iodata(), Data::iodata()) -> binary().
block_encrypt(des_ecb, Key, Data) ->
des_ecb_encrypt(Key, Data);
block_encrypt(blowfish_ecb, Key, Data) ->
- blowfish_ecb_encrypt(Key, Data).
+ blowfish_ecb_encrypt(Key, Data);
+block_encrypt(aes_ecb, Key, Data) ->
+ aes_ecb_encrypt(Key, Data).
--spec block_decrypt(des_ecb | blowfish_ecb, Key::iodata(), Data::iodata()) -> binary().
+-spec block_decrypt(des_ecb | blowfish_ecb | aes_ecb, Key::iodata(), Data::iodata()) -> binary().
block_decrypt(des_ecb, Key, Data) ->
des_ecb_decrypt(Key, Data);
block_decrypt(blowfish_ecb, Key, Data) ->
- blowfish_ecb_decrypt(Key, Data).
+ blowfish_ecb_decrypt(Key, Data);
+block_decrypt(aes_ecb, Key, Data) ->
+ aes_ecb_decrypt(Key, Data).
-spec next_iv(des_cbc | des3_cbc | aes_cbc | aes_ige, Data::iodata()) -> binary().
@@ -588,9 +593,8 @@ generate_key(srp, {user, [Generator, Prime, Version]}, PrivateArg)
end,
user_srp_gen_key(Private, Generator, Prime);
-generate_key(ecdh, Curve, undefined) ->
- ec_key_generate(nif_curve_params(Curve)).
-
+generate_key(ecdh, Curve, PrivKey) ->
+ ec_key_generate(nif_curve_params(Curve), ensure_int_as_bin(PrivKey)).
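A hedged sketch of how the extended call can be used (it mirrors the do_generate/1 test case and the secp192r1 vectors added further down in this diff; passing undefined keeps the previous behaviour of generating a fresh key pair):

    %% Private key 1 corresponds to the curve's base point (see the ecc() vectors).
    {_BasePoint, _} = crypto:generate_key(ecdh, secp192r1, 1),
    %% Previous behaviour: let the library generate both halves of the key pair.
    {_Pub, _Priv}   = crypto:generate_key(ecdh, secp192r1, undefined),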
compute_key(dh, OthersPublicKey, MyPrivateKey, DHParameters) ->
case dh_compute_key_nif(ensure_int_as_bin(OthersPublicKey),
@@ -1393,6 +1397,21 @@ aes_ctr_encrypt(_Key, _IVec, _Data) -> ?nif_stub.
aes_ctr_decrypt(_Key, _IVec, _Cipher) -> ?nif_stub.
%%
+%% AES - in electronic codebook mode (ECB)
+%%
+-spec aes_ecb_crypt(iodata(), iodata(), boolean()) ->
+ binary().
+
+aes_ecb_encrypt(Key, Data) ->
+ aes_ecb_crypt(Key, Data, true).
+
+aes_ecb_decrypt(Key, Data) ->
+ aes_ecb_crypt(Key, Data, false).
+
+aes_ecb_crypt(_Key, __Data, _IsEncrypt) -> ?nif_stub.
+
+
+%%
%% AES - in counter mode (CTR) with state maintained for multi-call streaming
%%
-type ctr_state() :: { iodata(), binary(), binary(), integer() }.
@@ -1555,7 +1574,7 @@ dh_compute_key(OthersPublicKey, MyPrivateKey, DHParameters) ->
dh_compute_key_nif(_OthersPublicKey, _MyPrivateKey, _DHParameters) -> ?nif_stub.
-ec_key_generate(_Key) -> ?nif_stub.
+ec_key_generate(_Curve, _Key) -> ?nif_stub.
ecdh_compute_key_nif(_Others, _Curve, _My) -> ?nif_stub.
diff --git a/lib/crypto/test/crypto_SUITE.erl b/lib/crypto/test/crypto_SUITE.erl
index 1031e6403f..7fcfc1ffc5 100644
--- a/lib/crypto/test/crypto_SUITE.erl
+++ b/lib/crypto/test/crypto_SUITE.erl
@@ -58,6 +58,7 @@ all() ->
{group, aes_cfb8},
{group, aes_cfb128},
{group, aes_cbc256},
+ {group, aes_ecb},
{group, aes_ige256},
{group, rc2_cbc},
{group, rc4},
@@ -84,7 +85,7 @@ groups() ->
{dss, [], [sign_verify]},
{ecdsa, [], [sign_verify]},
{dh, [], [generate_compute]},
- {ecdh, [], [compute]},
+ {ecdh, [], [compute, generate]},
{srp, [], [generate_compute]},
{des_cbc, [], [block]},
{des_cfb, [], [block]},
@@ -96,6 +97,7 @@ groups() ->
{aes_cfb8,[], [block]},
{aes_cfb128,[], [block]},
{aes_cbc256,[], [block]},
+ {aes_ecb,[], [block]},
{aes_ige256,[], [block]},
{blowfish_cbc, [], [block]},
{blowfish_ecb, [], [block]},
@@ -243,6 +245,12 @@ compute(Config) when is_list(Config) ->
Gen = proplists:get_value(compute, Config),
lists:foreach(fun do_compute/1, Gen).
%%--------------------------------------------------------------------
+generate() ->
+ [{doc, " Test crypto:generate_key"}].
+generate(Config) when is_list(Config) ->
+ Gen = proplists:get_value(generate, Config),
+ lists:foreach(fun do_generate/1, Gen).
+%%--------------------------------------------------------------------
mod_pow() ->
[{doc, "mod_pow testing (A ^ M % P with bignums)"}].
mod_pow(Config) when is_list(Config) ->
@@ -494,6 +502,14 @@ do_compute({ecdh = Type, Pub, Priv, Curve, SharedSecret}) ->
ct:fail({{crypto, compute_key, [Type, Pub, Priv, Curve]}, {expected, SharedSecret}, {got, Other}})
end.
+do_generate({ecdh = Type, Curve, Priv, Pub}) ->
+ case crypto:generate_key(Type, Curve, Priv) of
+ {Pub, _} ->
+ ok;
+ {Other, _} ->
+ ct:fail({{crypto, generate_key, [Type, Priv, Curve]}, {expected, Pub}, {got, Other}})
+ end.
+
hexstr2point(X, Y) ->
<<4:8, (hexstr2bin(X))/binary, (hexstr2bin(Y))/binary>>.
@@ -721,7 +737,8 @@ group_config(srp, Config) ->
[{generate_compute, GenerateCompute} | Config];
group_config(ecdh, Config) ->
Compute = ecdh(),
- [{compute, Compute} | Config];
+ Generate = ecc(),
+ [{compute, Compute}, {generate, Generate} | Config];
group_config(dh, Config) ->
GenerateCompute = [dh()],
[{generate_compute, GenerateCompute} | Config];
@@ -749,6 +766,9 @@ group_config(aes_cbc128, Config) ->
group_config(aes_cbc256, Config) ->
Block = aes_cbc256(),
[{block, Block} | Config];
+group_config(aes_ecb, Config) ->
+ Block = aes_ecb(),
+ [{block, Block} | Config];
group_config(aes_ige256, Config) ->
Block = aes_ige256(),
[{block, Block} | Config];
@@ -1183,6 +1203,106 @@ aes_cbc256() ->
hexstr2bin("f69f2445df4f9b17ad2b417be66c3710")}
].
+aes_ecb() ->
+ [
+ {aes_ecb,
+ <<"YELLOW SUBMARINE">>,
+ <<"YELLOW SUBMARINE">>},
+ {aes_ecb,
+ <<"0000000000000000">>,
+ <<"0000000000000000">>},
+ {aes_ecb,
+ <<"FFFFFFFFFFFFFFFF">>,
+ <<"FFFFFFFFFFFFFFFF">>},
+ {aes_ecb,
+ <<"3000000000000000">>,
+ <<"1000000000000001">>},
+ {aes_ecb,
+ <<"1111111111111111">>,
+ <<"1111111111111111">>},
+ {aes_ecb,
+ <<"0123456789ABCDEF">>,
+ <<"1111111111111111">>},
+ {aes_ecb,
+ <<"0000000000000000">>,
+ <<"0000000000000000">>},
+ {aes_ecb,
+ <<"FEDCBA9876543210">>,
+ <<"0123456789ABCDEF">>},
+ {aes_ecb,
+ <<"7CA110454A1A6E57">>,
+ <<"01A1D6D039776742">>},
+ {aes_ecb,
+ <<"0131D9619DC1376E">>,
+ <<"5CD54CA83DEF57DA">>},
+ {aes_ecb,
+ <<"07A1133E4A0B2686">>,
+ <<"0248D43806F67172">>},
+ {aes_ecb,
+ <<"3849674C2602319E">>,
+ <<"51454B582DDF440A">>},
+ {aes_ecb,
+ <<"04B915BA43FEB5B6">>,
+ <<"42FD443059577FA2">>},
+ {aes_ecb,
+ <<"0113B970FD34F2CE">>,
+ <<"059B5E0851CF143A">>},
+ {aes_ecb,
+ <<"0170F175468FB5E6">>,
+ <<"0756D8E0774761D2">>},
+ {aes_ecb,
+ <<"43297FAD38E373FE">>,
+ <<"762514B829BF486A">>},
+ {aes_ecb,
+ <<"07A7137045DA2A16">>,
+ <<"3BDD119049372802">>},
+ {aes_ecb,
+ <<"04689104C2FD3B2F">>,
+ <<"26955F6835AF609A">>},
+ {aes_ecb,
+ <<"37D06BB516CB7546">>,
+ <<"164D5E404F275232">>},
+ {aes_ecb,
+ <<"1F08260D1AC2465E">>,
+ <<"6B056E18759F5CCA">>},
+ {aes_ecb,
+ <<"584023641ABA6176">>,
+ <<"004BD6EF09176062">>},
+ {aes_ecb,
+ <<"025816164629B007">>,
+ <<"480D39006EE762F2">>},
+ {aes_ecb,
+ <<"49793EBC79B3258F">>,
+ <<"437540C8698F3CFA">>},
+ {aes_ecb,
+ <<"018310DC409B26D6">>,
+ <<"1D9D5C5018F728C2">>},
+ {aes_ecb,
+ <<"1C587F1C13924FEF">>,
+ <<"305532286D6F295A">>},
+ {aes_ecb,
+ <<"0101010101010101">>,
+ <<"0123456789ABCDEF">>},
+ {aes_ecb,
+ <<"1F1F1F1F0E0E0E0E">>,
+ <<"0123456789ABCDEF">>},
+ {aes_ecb,
+ <<"E0FEE0FEF1FEF1FE">>,
+ <<"0123456789ABCDEF">>},
+ {aes_ecb,
+ <<"0000000000000000">>,
+ <<"FFFFFFFFFFFFFFFF">>},
+ {aes_ecb,
+ <<"FFFFFFFFFFFFFFFF">>,
+ <<"0000000000000000">>},
+ {aes_ecb,
+ <<"0123456789ABCDEF">>,
+ <<"0000000000000000">>},
+ {aes_ecb,
+ <<"FEDCBA9876543210">>,
+ <<"FFFFFFFFFFFFFFFF">>}
+ ].
+
aes_ige256() ->
[{aes_ige256,
hexstr2bin("603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4"),
@@ -1968,6 +2088,27 @@ rsa_oaep() ->
Msg = hexstr2bin("750c4047f547e8e41411856523298ac9bae245efaf1397fbe56f9dd5"),
{rsa, Public, Private, Msg, rsa_pkcs1_oaep_padding}.
+ecc() ->
+%% http://point-at-infinity.org/ecc/nisttv
+%%
+%% Test vectors for the NIST elliptic curves P192, P224, P256, P384, P521,
+%% B163, B233, B283, B409, B571, K163, K233, K283, K409 and K571. For more
+%% information about the curves see
+%% http://csrc.nist.gov/encryption/dss/ecdsa/NISTReCur.pdf
+%%
+ [{ecdh,secp192r1,1,
+ hexstr2point("188DA80EB03090F67CBF20EB43A18800F4FF0AFD82FF1012",
+ "07192B95FFC8DA78631011ED6B24CDD573F977A11E794811")},
+ {ecdh,secp192r1,2,
+ hexstr2point("DAFEBF5828783F2AD35534631588A3F629A70FB16982A888",
+ "DD6BDA0D993DA0FA46B27BBC141B868F59331AFA5C7E93AB")},
+ {ecdh,secp192r1,3,
+ hexstr2point("76E32A2557599E6EDCD283201FB2B9AADFD0D359CBB263DA",
+ "782C37E372BA4520AA62E0FED121D49EF3B543660CFD05FD")},
+ {ecdh,secp192r1,4,
+ hexstr2point("35433907297CC378B0015703374729D7A4FE46647084E4BA",
+ "A2649984F2135C301EA3ACB0776CD4F125389B311DB3BE32")}].
+
no_padding() ->
Public = [_, Mod] = rsa_public(),
Private = rsa_private(),
diff --git a/lib/debugger/src/dbg_wx_settings.erl b/lib/debugger/src/dbg_wx_settings.erl
index 20aac74c3d..2c332c0a54 100644
--- a/lib/debugger/src/dbg_wx_settings.erl
+++ b/lib/debugger/src/dbg_wx_settings.erl
@@ -65,14 +65,8 @@ open_win(Win, Pos, SFile, Str, What) ->
{style,What}]),
case wxFileDialog:showModal(FD) of
?wxID_OK ->
- case wxFileDialog:getPaths(FD) of
- [NewFile] ->
- wxFileDialog:destroy(FD),
- {ok, NewFile};
- _ ->
- wxFileDialog:destroy(FD),
- cancel
- end;
+ NewFile = wxFileDialog:getPath(FD),
+ {ok, NewFile};
_ ->
wxFileDialog:destroy(FD),
cancel
diff --git a/lib/dialyzer/doc/src/dialyzer.xml b/lib/dialyzer/doc/src/dialyzer.xml
index e482b1e6f8..b52c1edebf 100644
--- a/lib/dialyzer/doc/src/dialyzer.xml
+++ b/lib/dialyzer/doc/src/dialyzer.xml
@@ -139,7 +139,11 @@
<tag><c><![CDATA[-Wwarn]]></c></tag>
<item>A family of options which selectively turn on/off warnings
(for help on the names of warnings use
- <c><![CDATA[dialyzer -Whelp]]></c>).</item>
+ <c><![CDATA[dialyzer -Whelp]]></c>).
+ Note that the options can also be given in the file with a
+ <c>-dialyzer()</c> attribute. See <seealso
+ marker="#suppression">Requesting or Suppressing Warnings in
+ Source Files</seealso> below for details.</item>
<tag><c><![CDATA[--shell]]></c></tag>
<item>Do not disable the Erlang shell while running the GUI.</item>
<tag><c><![CDATA[--version]]></c> (or <c><![CDATA[-v]]></c>)</tag>
@@ -269,6 +273,71 @@
given from the command line, so please refer to the sections above for
a description of these.</p>
</section>
+
+ <section>
+ <marker id="suppression"></marker>
+ <title>Requesting or Suppressing Warnings in Source Files</title>
+ <p>
+ The <c>-dialyzer()</c> attribute can be used for turning off
+ warnings in a module by specifying functions or warning options.
+ For example, to turn off all warnings for the function
+ <c>f/0</c>, include the following line:
+ </p>
+<code type="none">
+-dialyzer({nowarn_function, f/0}).
+</code>
+ <p>To turn off warnings for improper lists, add the following line
+ to the source file:
+ </p>
+<code type="none">
+-dialyzer(no_improper_lists).
+</code>
+ <p>The <c>-dialyzer()</c> attribute is allowed after function
+ declarations. Lists of warning options or functions are also accepted:
+ </p>
+<code type="none">
+-dialyzer([{nowarn_function, [f/0]}, no_improper_lists]).
+</code>
+ <p>
+ Warning options can be restricted to functions:
+ </p>
+<code type="none">
+-dialyzer({no_improper_lists, g/0}).
+</code>
+<code type="none">
+-dialyzer({[no_return, no_match], [g/0, h/0]}).
+</code>
+ <p>
+ For help on the warning options use <c>dialyzer -Whelp</c>. The
+ options are also enumerated <seealso
+ marker="#gui/1">below</seealso> (<c>WarnOpts</c>).
+ </p>
+ <note>
+ <p>
+ The <c>-dialyzer()</c> attribute is not checked by the Erlang
+ Compiler, but by the Dialyzer itself.
+ </p>
+ </note>
+ <note>
+ <p>
+ The warning option <c>-Wrace_conditions</c> has no effect when
+ set in source files.
+ </p>
+ </note>
+ <p>
+ The <c>-dialyzer()</c> attribute can also be used for turning on
+ warnings. For instance, if a module has been fixed regarding
+ unmatched returns, adding the line
+ </p>
+<code type="none">
+-dialyzer(unmatched_returns).
+</code>
+ <p>
+ can help ensure that no new unmatched return warnings are
+ introduced.
+ </p>
+ </section>
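As a rough combined illustration of the attribute forms described above (module and function names are made up for the example):

    -module(my_checked_module).               %% hypothetical module

    -dialyzer({nowarn_function, init/1}).     %% silence all warnings for init/1
    -dialyzer({no_improper_lists, build/0}).  %% silence one warning class for build/0
    -dialyzer(unmatched_returns).             %% request unmatched-return warnings module-wide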
+
<funcs>
<func>
<name>gui() -> ok | {error, Msg}</name>
@@ -283,7 +352,7 @@
OptList :: [Option]
Option :: {files, [Filename :: string()]}
| {files_rec, [DirName :: string()]}
- | {defines, [{Macro: atom(), Value : term()}]}
+ | {defines, [{Macro :: atom(), Value :: term()}]}
| {from, src_code | byte_code} %% Defaults to byte_code
| {init_plt, FileName :: string()} %% If changed from default
| {plts, [FileName :: string()]} %% If changed from default
diff --git a/lib/dialyzer/src/dialyzer.erl b/lib/dialyzer/src/dialyzer.erl
index cec94a49fd..c9e7da9ef0 100644
--- a/lib/dialyzer/src/dialyzer.erl
+++ b/lib/dialyzer/src/dialyzer.erl
@@ -282,15 +282,17 @@ cl_check_log(none) ->
cl_check_log(Output) ->
io:format(" Check output file `~s' for details\n", [Output]).
--spec format_warning(dial_warning()) -> string().
+-spec format_warning(raw_warning()) -> string().
format_warning(W) ->
format_warning(W, basename).
--spec format_warning(dial_warning(), fopt()) -> string().
+-spec format_warning(raw_warning() | dial_warning(), fopt()) -> string().
+format_warning({Tag, {File, Line, _MFA}, Msg}, FOpt) ->
+ format_warning({Tag, {File, Line}, Msg}, FOpt);
format_warning({_Tag, {File, Line}, Msg}, FOpt) when is_list(File),
- is_integer(Line) ->
+ is_integer(Line) ->
F = case FOpt of
fullpath -> File;
basename -> filename:basename(File)
diff --git a/lib/dialyzer/src/dialyzer.hrl b/lib/dialyzer/src/dialyzer.hrl
index 9a25f86512..90addc35a8 100644
--- a/lib/dialyzer/src/dialyzer.hrl
+++ b/lib/dialyzer/src/dialyzer.hrl
@@ -84,6 +84,15 @@
-type dial_warning() :: {dial_warn_tag(), file_line(), {atom(), [term()]}}.
%%
+%% This is the representation of each warning before suppressions have
+%% been applied
+%%
+-type m_or_mfa() :: module() % warnings not associated with any function
+ | mfa().
+-type warning_info() :: {file:filename(), non_neg_integer(), m_or_mfa()}.
+-type raw_warning() :: {dial_warn_tag(), warning_info(), {atom(), [term()]}}.
+
+%%
%% This is the representation of dialyzer's internal errors
%%
-type dial_error() :: any(). %% XXX: underspecified
@@ -103,6 +112,7 @@
-type fopt() :: 'basename' | 'fullpath'.
-type format() :: 'formatted' | 'raw'.
-type label() :: non_neg_integer().
+-type dial_warn_tags():: ordsets:ordset(dial_warn_tag()).
-type rep_mode() :: 'quiet' | 'normal' | 'verbose'.
-type start_from() :: 'byte_code' | 'src_code'.
-type mfa_or_funlbl() :: label() | mfa().
@@ -138,7 +148,7 @@
init_plts = [] :: [file:filename()],
include_dirs = [] :: [file:filename()],
output_plt = none :: 'none' | file:filename(),
- legal_warnings = ordsets:new() :: ordsets:ordset(dial_warn_tag()),
+ legal_warnings = ordsets:new() :: dial_warn_tags(),
report_mode = normal :: rep_mode(),
erlang_mode = false :: boolean(),
use_contracts = true :: boolean(),
diff --git a/lib/dialyzer/src/dialyzer_analysis_callgraph.erl b/lib/dialyzer/src/dialyzer_analysis_callgraph.erl
index af1c2b7e3a..5ff7ad9c6f 100644
--- a/lib/dialyzer/src/dialyzer_analysis_callgraph.erl
+++ b/lib/dialyzer/src/dialyzer_analysis_callgraph.erl
@@ -39,8 +39,6 @@
one_file_result/0,
compile_result/0]).
--export_type([no_warn_unused/0]).
-
-include("dialyzer.hrl").
-record(analysis_state,
@@ -50,8 +48,9 @@
defines = [] :: [dial_define()],
doc_plt :: dialyzer_plt:plt(),
include_dirs = [] :: [file:filename()],
- no_warn_unused :: no_warn_unused(),
parent :: pid(),
+ legal_warnings :: % command line options
+ [dial_warn_tag()],
plt :: dialyzer_plt:plt(),
start_from = byte_code :: start_from(),
use_contracts = true :: boolean(),
@@ -59,9 +58,10 @@
solvers :: [solver()]
}).
--record(server_state, {parent :: pid(), legal_warnings :: [dial_warn_tag()]}).
-
--type no_warn_unused() :: sets:set(mfa()).
+-record(server_state,
+ {
+ parent :: pid()
+ }).
%%--------------------------------------------------------------------
%% Main
@@ -75,24 +75,24 @@ start(Parent, LegalWarnings, Analysis) ->
Analysis0 =
Analysis#analysis{race_detection = RacesOn, timing_server = TimingServer},
Analysis1 = expand_files(Analysis0),
- Analysis2 = run_analysis(Analysis1),
- State = #server_state{parent = Parent, legal_warnings = LegalWarnings},
+ Analysis2 = run_analysis(Analysis1, LegalWarnings),
+ State = #server_state{parent = Parent},
loop(State, Analysis2, none),
dialyzer_timing:stop(TimingServer).
-run_analysis(Analysis) ->
+run_analysis(Analysis, LegalWarnings) ->
Self = self(),
- Fun = fun() -> analysis_start(Self, Analysis) end,
+ Fun = fun() -> analysis_start(Self, Analysis, LegalWarnings) end,
Analysis#analysis{analysis_pid = spawn_link(Fun)}.
-loop(#server_state{parent = Parent, legal_warnings = LegalWarnings} = State,
+loop(#server_state{parent = Parent} = State,
#analysis{analysis_pid = AnalPid} = Analysis, ExtCalls) ->
receive
{AnalPid, log, LogMsg} ->
send_log(Parent, LogMsg),
loop(State, Analysis, ExtCalls);
{AnalPid, warnings, Warnings} ->
- case filter_warnings(LegalWarnings, Warnings) of
+ case Warnings of
[] -> ok;
SendWarnings ->
send_warnings(Parent, SendWarnings)
@@ -129,7 +129,7 @@ loop(#server_state{parent = Parent, legal_warnings = LegalWarnings} = State,
%% The Analysis
%%--------------------------------------------------------------------
-analysis_start(Parent, Analysis) ->
+analysis_start(Parent, Analysis, LegalWarnings) ->
CServer = dialyzer_codeserver:new(),
Plt = Analysis#analysis.plt,
State = #analysis_state{codeserver = CServer,
@@ -139,13 +139,14 @@ analysis_start(Parent, Analysis) ->
include_dirs = Analysis#analysis.include_dirs,
plt = Plt,
parent = Parent,
+ legal_warnings = LegalWarnings,
start_from = Analysis#analysis.start_from,
use_contracts = Analysis#analysis.use_contracts,
timing_server = Analysis#analysis.timing_server,
solvers = Analysis#analysis.solvers
},
Files = ordsets:from_list(Analysis#analysis.files),
- {Callgraph, NoWarn, TmpCServer0} = compile_and_store(Files, State),
+ {Callgraph, TmpCServer0} = compile_and_store(Files, State),
%% Remote type postprocessing
NewCServer =
try
@@ -177,7 +178,6 @@ analysis_start(Parent, Analysis) ->
State0 = State#analysis_state{plt = NewPlt1},
dump_callgraph(Callgraph, State0, Analysis),
State1 = State0#analysis_state{codeserver = NewCServer},
- State2 = State1#analysis_state{no_warn_unused = NoWarn},
%% Remove all old versions of the files being analyzed
AllNodes = dialyzer_callgraph:all_nodes(Callgraph),
Plt1 = dialyzer_plt:delete_list(NewPlt1, AllNodes),
@@ -187,14 +187,14 @@ analysis_start(Parent, Analysis) ->
true -> dialyzer_callgraph:put_race_detection(true, Callgraph);
false -> Callgraph
end,
- State3 = analyze_callgraph(NewCallgraph, State2#analysis_state{plt = Plt1}),
+ State2 = analyze_callgraph(NewCallgraph, State1#analysis_state{plt = Plt1}),
dialyzer_callgraph:dispose_race_server(NewCallgraph),
rcv_and_send_ext_types(Parent),
NonExports = sets:subtract(sets:from_list(AllNodes), Exports),
NonExportsList = sets:to_list(NonExports),
- Plt2 = dialyzer_plt:delete_list(State3#analysis_state.plt, NonExportsList),
- send_codeserver_plt(Parent, CServer, State3#analysis_state.plt),
- send_analysis_done(Parent, Plt2, State3#analysis_state.doc_plt).
+ Plt2 = dialyzer_plt:delete_list(State2#analysis_state.plt, NonExportsList),
+ send_codeserver_plt(Parent, CServer, State2#analysis_state.plt),
+ send_analysis_done(Parent, Plt2, State2#analysis_state.doc_plt).
analyze_callgraph(Callgraph, #analysis_state{codeserver = Codeserver,
doc_plt = DocPlt,
@@ -210,11 +210,11 @@ analyze_callgraph(Callgraph, #analysis_state{codeserver = Codeserver,
TimingServer, Solvers, Parent),
{NewPlt0, DocPlt};
succ_typings ->
- NoWarn = State#analysis_state.no_warn_unused,
{Warnings, NewPlt0, NewDocPlt0} =
dialyzer_succ_typings:get_warnings(Callgraph, Plt, DocPlt, Codeserver,
- NoWarn, TimingServer, Solvers, Parent),
- send_warnings(State#analysis_state.parent, Warnings),
+ TimingServer, Solvers, Parent),
+ Warnings1 = filter_warnings(Warnings, Codeserver),
+ send_warnings(State#analysis_state.parent, Warnings1),
{NewPlt0, NewDocPlt0}
end,
dialyzer_callgraph:delete(Callgraph),
@@ -230,19 +230,22 @@ analyze_callgraph(Callgraph, #analysis_state{codeserver = Codeserver,
defines = [] :: [dial_define()],
include_dirs = [] :: [file:filename()],
start_from = byte_code :: start_from(),
- use_contracts = true :: boolean()
+ use_contracts = true :: boolean(),
+ legal_warnings :: [dial_warn_tag()]
}).
make_compile_init(#analysis_state{codeserver = Codeserver,
defines = Defs,
include_dirs = Dirs,
use_contracts = UseContracts,
+ legal_warnings = LegalWarnings,
start_from = StartFrom}, Callgraph) ->
#compile_init{callgraph = Callgraph,
codeserver = Codeserver,
defines = [{d, Macro, Val} || {Macro, Val} <- Defs],
include_dirs = [{i, D} || D <- Dirs],
use_contracts = UseContracts,
+ legal_warnings = LegalWarnings,
start_from = StartFrom}.
compile_and_store(Files, #analysis_state{codeserver = CServer,
@@ -252,7 +255,7 @@ compile_and_store(Files, #analysis_state{codeserver = CServer,
{T1, _} = statistics(wall_clock),
Callgraph = dialyzer_callgraph:new(),
CompileInit = make_compile_init(State, Callgraph),
- {{Failed, NoWarn, Modules}, NextLabel} =
+ {{Failed, Modules}, NextLabel} =
?timing(Timing, "compile", _C1,
dialyzer_coordinator:parallel_job(compile, Files,
CompileInit, Timing)),
@@ -281,34 +284,34 @@ compile_and_store(Files, #analysis_state{codeserver = CServer,
{T3, _} = statistics(wall_clock),
Msg2 = io_lib:format("done in ~.2f secs\n", [(T3-T2)/1000]),
send_log(Parent, Msg2),
- {Callgraph, sets:from_list(NoWarn), CServer2}.
+ {Callgraph, CServer2}.
-type compile_init_data() :: #compile_init{}.
-type error_reason() :: string().
--type compile_result() :: {[{file:filename(), error_reason()}], [mfa()],
+-type compile_result() :: {[{file:filename(), error_reason()}],
[module()]}. %%opaque
-type one_file_result() :: {error, error_reason()} |
{ok, [dialyzer_callgraph:callgraph_edge()],
- [mfa_or_funlbl()], [mfa()], module()}. %%opaque
--type compile_mid_data() :: {module(), cerl:cerl(), [mfa()],
+ [mfa_or_funlbl()], module()}. %%opaque
+-type compile_mid_data() :: {module(), cerl:cerl(),
dialyzer_callgraph:callgraph(),
dialyzer_codeserver:codeserver()}.
-spec compile_init_result() -> compile_result().
-compile_init_result() -> {[], [], []}.
+compile_init_result() -> {[], []}.
-spec add_to_result(file:filename(), one_file_result(), compile_result(),
compile_init_data()) -> compile_result().
-add_to_result(File, NewData, {Failed, NoWarn, Mods}, InitData) ->
+add_to_result(File, NewData, {Failed, Mods}, InitData) ->
case NewData of
{error, Reason} ->
- {[{File, Reason}|Failed], NoWarn, Mods};
- {ok, V, E, NewNoWarn, Mod} ->
+ {[{File, Reason}|Failed], Mods};
+ {ok, V, E, Mod} ->
Callgraph = InitData#compile_init.callgraph,
dialyzer_callgraph:add_edges(E, V, Callgraph),
- {Failed, NewNoWarn ++ NoWarn, [Mod|Mods]}
+ {Failed, [Mod|Mods]}
end.
-spec start_compilation(file:filename(), compile_init_data()) ->
@@ -318,12 +321,14 @@ start_compilation(File,
#compile_init{callgraph = Callgraph, codeserver = Codeserver,
defines = Defines, include_dirs = IncludeD,
use_contracts = UseContracts,
+ legal_warnings = LegalWarnings,
start_from = StartFrom}) ->
case StartFrom of
src_code ->
- compile_src(File, IncludeD, Defines, Callgraph, Codeserver, UseContracts);
+ compile_src(File, IncludeD, Defines, Callgraph, Codeserver,
+ UseContracts, LegalWarnings);
byte_code ->
- compile_byte(File, Callgraph, Codeserver, UseContracts)
+ compile_byte(File, Callgraph, Codeserver, UseContracts, LegalWarnings)
end.
cleanup_callgraph(#analysis_state{plt = InitPlt, parent = Parent,
@@ -357,88 +362,86 @@ cleanup_callgraph(#analysis_state{plt = InitPlt, parent = Parent,
end,
Callgraph1.
-compile_src(File, Includes, Defines, Callgraph, CServer, UseContracts) ->
+compile_src(File, Includes, Defines, Callgraph, CServer, UseContracts,
+ LegalWarnings) ->
DefaultIncludes = default_includes(filename:dirname(File)),
SrcCompOpts = dialyzer_utils:src_compiler_opts(),
CompOpts = SrcCompOpts ++ Includes ++ Defines ++ DefaultIncludes,
case dialyzer_utils:get_abstract_code_from_src(File, CompOpts) of
{error, _Msg} = Error -> Error;
{ok, AbstrCode} ->
- compile_common(File, AbstrCode, CompOpts, Callgraph, CServer, UseContracts)
+ compile_common(File, AbstrCode, CompOpts, Callgraph, CServer,
+ UseContracts, LegalWarnings)
end.
-compile_byte(File, Callgraph, CServer, UseContracts) ->
+compile_byte(File, Callgraph, CServer, UseContracts, LegalWarnings) ->
case dialyzer_utils:get_abstract_code_from_beam(File) of
error ->
{error, " Could not get abstract code for: " ++ File ++ "\n" ++
" Recompile with +debug_info or analyze starting from source code"};
{ok, AbstrCode} ->
- compile_byte(File, AbstrCode, Callgraph, CServer, UseContracts)
+ compile_byte(File, AbstrCode, Callgraph, CServer, UseContracts,
+ LegalWarnings)
end.
-compile_byte(File, AbstrCode, Callgraph, CServer, UseContracts) ->
+compile_byte(File, AbstrCode, Callgraph, CServer, UseContracts,
+ LegalWarnings) ->
case dialyzer_utils:get_compile_options_from_beam(File) of
error ->
{error, " Could not get compile options for: " ++ File ++ "\n" ++
" Recompile or analyze starting from source code"};
{ok, CompOpts} ->
- compile_common(File, AbstrCode, CompOpts, Callgraph, CServer, UseContracts)
+ compile_common(File, AbstrCode, CompOpts, Callgraph, CServer,
+ UseContracts, LegalWarnings)
end.
-compile_common(File, AbstrCode, CompOpts, Callgraph, CServer, UseContracts) ->
+compile_common(File, AbstrCode, CompOpts, Callgraph, CServer,
+ UseContracts, LegalWarnings) ->
case dialyzer_utils:get_core_from_abstract_code(AbstrCode, CompOpts) of
error -> {error, " Could not get core Erlang code for: " ++ File};
{ok, Core} ->
Mod = cerl:concrete(cerl:module_name(Core)),
- NoWarn = abs_get_nowarn(AbstrCode, Mod),
case dialyzer_utils:get_record_and_type_info(AbstrCode) of
{error, _} = Error -> Error;
{ok, RecInfo} ->
CServer1 =
dialyzer_codeserver:store_temp_records(Mod, RecInfo, CServer),
+ MetaFunInfo =
+ dialyzer_utils:get_fun_meta_info(Mod, AbstrCode, LegalWarnings),
+ CServer2 =
+ dialyzer_codeserver:insert_fun_meta_info(MetaFunInfo, CServer1),
case UseContracts of
true ->
case dialyzer_utils:get_spec_info(Mod, AbstrCode, RecInfo) of
{error, _} = Error -> Error;
{ok, SpecInfo, CallbackInfo} ->
- CServer2 =
+ CServer3 =
dialyzer_codeserver:store_temp_contracts(Mod, SpecInfo,
CallbackInfo,
- CServer1),
- store_core(Mod, Core, NoWarn, Callgraph, CServer2)
+ CServer2),
+ store_core(Mod, Core, Callgraph, CServer3)
end;
false ->
- store_core(Mod, Core, NoWarn, Callgraph, CServer1)
+ store_core(Mod, Core, Callgraph, CServer2)
end
end
end.
-store_core(Mod, Core, NoWarn, Callgraph, CServer) ->
+store_core(Mod, Core, Callgraph, CServer) ->
Exp = get_exports_from_core(Core),
ExpTypes = get_exported_types_from_core(Core),
CServer = dialyzer_codeserver:insert_exports(Exp, CServer),
CServer = dialyzer_codeserver:insert_temp_exported_types(ExpTypes, CServer),
CoreTree = cerl:from_records(Core),
- {ok, cerl_trees:size(CoreTree), {Mod, CoreTree, NoWarn, Callgraph, CServer}}.
+ CoreSize = cerl_trees:size(CoreTree),
+ {ok, CoreSize, {Mod, CoreTree, Callgraph, CServer}}.
-spec continue_compilation(integer(), compile_mid_data()) -> one_file_result().
-continue_compilation(NextLabel, {Mod, CoreTree, NoWarn, Callgraph, CServer}) ->
+continue_compilation(NextLabel, {Mod, CoreTree, Callgraph, CServer}) ->
{LabeledTree, _NewNextLabel} = cerl_trees:label(CoreTree, NextLabel),
LabeledCore = cerl:to_records(LabeledTree),
- store_code_and_build_callgraph(Mod, LabeledCore, Callgraph, NoWarn, CServer).
-
-abs_get_nowarn(Abs, M) ->
- Opts = lists:flatten([C || {attribute, _, compile, C} <- Abs]),
- Warn = erl_lint:bool_option(warn_unused_function, nowarn_unused_function,
- true, Opts),
- case Warn of
- false ->
- [{M, F, A} || {function, _, F, A, _} <- Abs]; % all functions
- true ->
- [{M, F, A} || {nowarn_unused_function, FAs} <- Opts,
- {F, A} <- lists:flatten([FAs])]
- end.
+ store_code_and_build_callgraph(Mod, LabeledCore, Callgraph, CServer).
get_exported_types_from_core(Core) ->
Attrs = cerl:module_attrs(Core),
@@ -456,11 +459,11 @@ get_exports_from_core(Core) ->
M = cerl:atom_val(cerl:module_name(Tree)),
[{M, F, A} || {F, A} <- Exports2].
-store_code_and_build_callgraph(Mod, Core, Callgraph, NoWarn, CServer) ->
+store_code_and_build_callgraph(Mod, Core, Callgraph, CServer) ->
CoreTree = cerl:from_records(Core),
{Vertices, Edges} = dialyzer_callgraph:scan_core_tree(CoreTree, Callgraph),
CServer = dialyzer_codeserver:insert(Mod, CoreTree, CServer),
- {ok, Vertices, Edges, NoWarn, Mod}.
+ {ok, Vertices, Edges, Mod}.
%%--------------------------------------------------------------------
%% Utilities
@@ -548,10 +551,19 @@ send_warnings(Parent, Warnings) ->
Parent ! {self(), warnings, Warnings},
ok.
-filter_warnings(LegalWarnings, Warnings) ->
- [TIW || {Tag, _Id, _Warning} = TIW <- Warnings,
- ordsets:is_element(Tag, LegalWarnings)].
+filter_warnings(Warnings, Codeserver) ->
+ [TWW || {Tag, WarningInfo, _Warning} = TWW <- Warnings,
+ is_ok_fun(WarningInfo, Codeserver),
+ is_ok_tag(Tag, WarningInfo, Codeserver)].
+
+is_ok_fun({_F, _L, Module}, _Codeserver) when is_atom(Module) ->
+ true;
+is_ok_fun({_Filename, _Line, {_M, _F, _A} = MFA}, Codeserver) ->
+ not dialyzer_utils:is_suppressed_fun(MFA, Codeserver).
+is_ok_tag(Tag, {_F, _L, MorMFA}, Codeserver) ->
+ not dialyzer_utils:is_suppressed_tag(MorMFA, Tag, Codeserver).
+
send_analysis_done(Parent, Plt, DocPlt) ->
Parent ! {self(), done, Plt, DocPlt},
ok.
@@ -573,7 +585,8 @@ send_codeserver_plt(Parent, CServer, Plt ) ->
ok.
send_bad_calls(Parent, BadCalls, CodeServer) ->
- send_warnings(Parent, format_bad_calls(BadCalls, CodeServer, [])).
+ FormatedBadCalls = format_bad_calls(BadCalls, CodeServer, []),
+ send_warnings(Parent, FormatedBadCalls).
send_mod_deps(Parent, ModuleDeps) ->
Parent ! {self(), mod_deps, ModuleDeps},
@@ -585,8 +598,9 @@ format_bad_calls([{{_, _, _}, {_, module_info, A}}|Left], CodeServer, Acc)
format_bad_calls([{FromMFA, {M, F, A} = To}|Left], CodeServer, Acc) ->
{_Var, FunCode} = dialyzer_codeserver:lookup_mfa_code(FromMFA, CodeServer),
Msg = {call_to_missing, [M, F, A]},
- FileLine = find_call_file_and_line(FunCode, To),
- NewAcc = [{?WARN_CALLGRAPH, FileLine, Msg}|Acc],
+ {File, Line} = find_call_file_and_line(FunCode, To),
+ WarningInfo = {File, Line, FromMFA},
+ NewAcc = [{?WARN_CALLGRAPH, WarningInfo, Msg}|Acc],
format_bad_calls(Left, CodeServer, NewAcc);
format_bad_calls([], _CodeServer, Acc) ->
Acc.
diff --git a/lib/dialyzer/src/dialyzer_behaviours.erl b/lib/dialyzer/src/dialyzer_behaviours.erl
index bbedd3201e..19b63bd2c8 100644
--- a/lib/dialyzer/src/dialyzer_behaviours.erl
+++ b/lib/dialyzer/src/dialyzer_behaviours.erl
@@ -52,7 +52,7 @@
-spec check_callbacks(module(), [{cerl:cerl(), cerl:cerl()}], rectab(),
dialyzer_plt:plt(),
- dialyzer_codeserver:codeserver()) -> [dial_warning()].
+ dialyzer_codeserver:codeserver()) -> [raw_warning()].
check_callbacks(Module, Attrs, Records, Plt, Codeserver) ->
{Behaviours, BehLines} = get_behaviours(Attrs),
@@ -65,7 +65,7 @@ check_callbacks(Module, Attrs, Records, Plt, Codeserver) ->
State = #state{plt = Plt, filename = File, behlines = BehLines,
codeserver = Codeserver, records = Records},
Warnings = get_warnings(Module, Behaviours, State),
- [add_tag_file_line(Module, W, State) || W <- Warnings]
+ [add_tag_warning_info(Module, W, State) || W <- Warnings]
end.
%%--------------------------------------------------------------------
@@ -193,7 +193,7 @@ find_mismatching_args(Kind, [Type|Rest], [CbType|CbRest], Behaviour,
Arity, Records, N+1, NewAcc)
end.
-add_tag_file_line(_Module, {Tag, [B|_R]} = Warn, State)
+add_tag_warning_info(Module, {Tag, [B|_R]} = Warn, State)
when Tag =:= callback_missing;
Tag =:= callback_info_missing ->
{B, Line} = lists:keyfind(B, 1, State#state.behlines),
@@ -202,18 +202,18 @@ add_tag_file_line(_Module, {Tag, [B|_R]} = Warn, State)
callback_missing -> ?WARN_BEHAVIOUR;
callback_info_missing -> ?WARN_UNDEFINED_CALLBACK
end,
- {Category, {State#state.filename, Line}, Warn};
-add_tag_file_line(_Module, {Tag, [File, Line|R]}, _State)
+ {Category, {State#state.filename, Line, Module}, Warn};
+add_tag_warning_info(Module, {Tag, [File, Line|R]}, _State)
when Tag =:= callback_spec_type_mismatch;
Tag =:= callback_spec_arg_type_mismatch ->
- {?WARN_BEHAVIOUR, {File, Line}, {Tag, R}};
-add_tag_file_line(Module, {_Tag, [_B, Fun, Arity|_R]} = Warn, State) ->
+ {?WARN_BEHAVIOUR, {File, Line, Module}, {Tag, R}};
+add_tag_warning_info(Module, {_Tag, [_B, Fun, Arity|_R]} = Warn, State) ->
{_A, FunCode} =
dialyzer_codeserver:lookup_mfa_code({Module, Fun, Arity},
State#state.codeserver),
Anns = cerl:get_ann(FunCode),
- FileLine = {get_file(Anns), get_line(Anns)},
- {?WARN_BEHAVIOUR, FileLine, Warn}.
+ WarningInfo = {get_file(Anns), get_line(Anns), {Module, Fun, Arity}},
+ {?WARN_BEHAVIOUR, WarningInfo, Warn}.
get_line([Line|_]) when is_integer(Line) -> Line;
get_line([_|Tail]) -> get_line(Tail);
diff --git a/lib/dialyzer/src/dialyzer_cl.erl b/lib/dialyzer/src/dialyzer_cl.erl
index 3e7d9dfa99..debb78bd0b 100644
--- a/lib/dialyzer/src/dialyzer_cl.erl
+++ b/lib/dialyzer/src/dialyzer_cl.erl
@@ -48,7 +48,7 @@
plt_info = none :: 'none' | dialyzer_plt:plt_info(),
report_mode = normal :: rep_mode(),
return_status= ?RET_NOTHING_SUSPICIOUS :: dial_ret(),
- stored_warnings = [] :: [dial_warning()],
+ stored_warnings = [] :: [raw_warning()],
unknown_behaviours = [] :: [dialyzer_behaviours:behaviour()]
}).
@@ -627,7 +627,7 @@ format_log_cache(LogCache) ->
Str = lists:append(lists:reverse(LogCache)),
string:join(string:tokens(Str, "\n"), "\n ").
--spec store_warnings(#cl_state{}, [dial_warning()]) -> #cl_state{}.
+-spec store_warnings(#cl_state{}, [raw_warning()]) -> #cl_state{}.
store_warnings(#cl_state{stored_warnings = StoredWarnings} = St, Warnings) ->
St#cl_state{stored_warnings = StoredWarnings ++ Warnings}.
@@ -685,16 +685,22 @@ return_value(State = #cl_state{erlang_mode = ErlangMode,
unknown_behaviours(State);
false -> []
end,
+ WarningInfo = {_Filename = "", _Line = 0, _MorMFA = ''},
UnknownWarnings =
- [{?WARN_UNKNOWN, {_Filename = "", _Line = 0}, W} || W <- Unknown],
+ [{?WARN_UNKNOWN, WarningInfo, W} || W <- Unknown],
AllWarnings =
UnknownWarnings ++ process_warnings(StoredWarnings),
- {RetValue, AllWarnings}
+ {RetValue, set_warning_id(AllWarnings)}
end.
unknown_functions(#cl_state{external_calls = Calls}) ->
[{unknown_function, MFA} || MFA <- Calls].
+set_warning_id(Warnings) ->
+ lists:map(fun({Tag, {File, Line, _MorMFA}, Msg}) ->
+ {Tag, {File, Line}, Msg}
+ end, Warnings).
+
print_ext_calls(#cl_state{report_mode = quiet}) ->
ok;
print_ext_calls(#cl_state{output = Output,
@@ -817,15 +823,16 @@ print_warnings(#cl_state{output = Output,
formatted ->
[dialyzer:format_warning(W, FOpt) || W <- PrWarnings];
raw ->
- [io_lib:format("~p. \n", [W]) || W <- PrWarnings]
+ [io_lib:format("~p. \n",
+ [W]) || W <- set_warning_id(PrWarnings)]
end,
io:format(Output, "\n~s", [S])
end.
--spec process_warnings([dial_warning()]) -> [dial_warning()].
+-spec process_warnings([raw_warning()]) -> [raw_warning()].
process_warnings(Warnings) ->
- Warnings1 = lists:keysort(2, Warnings), %% Sort on file/line
+ Warnings1 = lists:keysort(2, Warnings), %% Sort on file/line (and m/mfa..)
remove_duplicate_warnings(Warnings1, []).
remove_duplicate_warnings([Duplicate, Duplicate|Left], Acc) ->
diff --git a/lib/dialyzer/src/dialyzer_codeserver.erl b/lib/dialyzer/src/dialyzer_codeserver.erl
index 593e71f30b..e0add00061 100644
--- a/lib/dialyzer/src/dialyzer_codeserver.erl
+++ b/lib/dialyzer/src/dialyzer_codeserver.erl
@@ -43,19 +43,21 @@
insert/3,
insert_exports/2,
insert_temp_exported_types/2,
+ insert_fun_meta_info/2,
is_exported/2,
lookup_mod_code/2,
lookup_mfa_code/2,
lookup_mod_records/2,
lookup_mod_contracts/2,
lookup_mfa_contract/2,
+ lookup_meta_info/2,
new/0,
set_next_core_label/2,
set_temp_records/2,
store_temp_records/3,
store_temp_contracts/4]).
--export_type([codeserver/0]).
+-export_type([codeserver/0, fun_meta_info/0]).
-include("dialyzer.hrl").
@@ -70,12 +72,19 @@
-type contracts() :: dict:dict(mfa(),dialyzer_contracts:file_contract()).
-type mod_contracts() :: dict:dict(module(), contracts()).
+%% A property-list of data compiled from -compile and -dialyzer attributes.
+-type meta_info() :: [{{'nowarn_function' | dial_warn_tag()},
+ 'mod' | 'func'}].
+-type fun_meta_info() :: [{mfa(), meta_info()}
+ | {module(), [dial_warn_tag()]}].
+
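Roughly, an entry of fun_meta_info() pairs either an MFA with its meta_info() proplist or a module with the warning tags requested module-wide; a hedged illustration of the shape (the atoms below are placeholders, not values taken from this patch):

    %% [{{some_mod,some_fun,1}, MetaInfo},   MetaInfo :: meta_info() for that function
    %%  {some_mod, WarnTags}]                WarnTags :: [dial_warn_tag()] for the module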
-record(codeserver, {next_core_label = 0 :: label(),
code :: dict_ets(),
exported_types :: set_ets(), % set(mfa())
records :: dict_ets(),
contracts :: dict_ets(),
callbacks :: dict_ets(),
+ fun_meta_info :: dict_ets(), % {mfa(), meta_info()}
exports :: 'clean' | set_ets(), % set(mfa())
temp_exported_types :: 'clean' | set_ets(), % set(mfa())
temp_records :: 'clean' | dict_ets(),
@@ -129,14 +138,17 @@ new() ->
CodeOptions = [compressed, public, {read_concurrency, true}],
Code = ets:new(dialyzer_codeserver_code, CodeOptions),
TempOptions = [public, {write_concurrency, true}],
- [Exports, TempExportedTypes, TempRecords, TempContracts, TempCallbacks] =
+ [Exports, FunMetaInfo, TempExportedTypes, TempRecords, TempContracts,
+ TempCallbacks] =
[ets:new(Name, TempOptions) ||
Name <-
- [dialyzer_codeserver_exports, dialyzer_codeserver_temp_exported_types,
+ [dialyzer_codeserver_exports, dialyzer_codeserver_fun_meta_info,
+ dialyzer_codeserver_temp_exported_types,
dialyzer_codeserver_temp_records, dialyzer_codeserver_temp_contracts,
dialyzer_codeserver_temp_callbacks]],
#codeserver{code = Code,
exports = Exports,
+ fun_meta_info = FunMetaInfo,
temp_exported_types = TempExportedTypes,
temp_records = TempRecords,
temp_contracts = TempContracts,
@@ -184,6 +196,12 @@ insert_exports(List, #codeserver{exports = Exports} = CS) ->
true = ets_set_insert_list(List, Exports),
CS.
+-spec insert_fun_meta_info(fun_meta_info(), codeserver()) -> codeserver().
+
+insert_fun_meta_info(List, #codeserver{fun_meta_info = FunMetaInfo} = CS) ->
+ true = ets:insert(FunMetaInfo, List),
+ CS.
+
-spec is_exported(mfa(), codeserver()) -> boolean().
is_exported(MFA, #codeserver{exports = Exports}) ->
@@ -290,6 +308,14 @@ get_file_contract(Key, ContDict) ->
lookup_mfa_contract(MFA, #codeserver{contracts = ContDict}) ->
ets_dict_find(MFA, ContDict).
+-spec lookup_meta_info(module() | mfa(), codeserver()) -> meta_info().
+
+lookup_meta_info(MorMFA, #codeserver{fun_meta_info = FunMetaInfo}) ->
+ case ets_dict_find(MorMFA, FunMetaInfo) of
+ error -> [];
+ {ok, PropList} -> PropList
+ end.
+
-spec get_contracts(codeserver()) -> mod_contracts().
get_contracts(#codeserver{contracts = ContDict}) ->
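[Reviewer sketch, not patch code] How the new fun_meta_info table is meant to be used; the module, function and tag names below are made up for illustration:

    %% CS0 is a codeserver() as returned by dialyzer_codeserver:new/0.
    demo_meta_info(CS0) ->
        Info = [{{mymod, f, 1}, [{nowarn_function, func}]},  % e.g. from -dialyzer({nowarn_function, f/1}).
                {mymod, [some_warn_tag]}],                   % module-wide tags (placeholder atom)
        CS = dialyzer_codeserver:insert_fun_meta_info(Info, CS0),
        [{nowarn_function, func}] =
            dialyzer_codeserver:lookup_meta_info({mymod, f, 1}, CS),
        [] = dialyzer_codeserver:lookup_meta_info({mymod, g, 0}, CS),  % no entry -> []
        CS.
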
diff --git a/lib/dialyzer/src/dialyzer_contracts.erl b/lib/dialyzer/src/dialyzer_contracts.erl
index ee147ca102..39a178cb7d 100644
--- a/lib/dialyzer/src/dialyzer_contracts.erl
+++ b/lib/dialyzer/src/dialyzer_contracts.erl
@@ -351,7 +351,7 @@ solve_constraints(Contract, Call, Constraints) ->
%% Checks the contracts for functions that are not implemented
-spec contracts_without_fun(contracts(), [_], dialyzer_callgraph:callgraph()) ->
- [dial_warning()].
+ [raw_warning()].
contracts_without_fun(Contracts, AllFuns0, Callgraph) ->
AllFuns1 = [{dialyzer_callgraph:lookup_name(Label, Callgraph), Arity}
@@ -362,8 +362,9 @@ contracts_without_fun(Contracts, AllFuns0, Callgraph) ->
[warn_spec_missing_fun(MFA, Contracts) || MFA <- ErrorContractMFAs].
warn_spec_missing_fun({M, F, A} = MFA, Contracts) ->
- {FileLine, _Contract, _Xtra} = dict:fetch(MFA, Contracts),
- {?WARN_CONTRACT_SYNTAX, FileLine, {spec_missing_fun, [M, F, A]}}.
+ {{File, Line}, _Contract, _Xtra} = dict:fetch(MFA, Contracts),
+ WarningInfo = {File, Line, MFA},
+ {?WARN_CONTRACT_SYNTAX, WarningInfo, {spec_missing_fun, [M, F, A]}}.
%% This treats the "when" constraints. It will be extended, we hope.
insert_constraints([{subtype, Type1, Type2}|Left], Dict) ->
@@ -585,7 +586,7 @@ general_domain([], AccSig) ->
-spec get_invalid_contract_warnings([module()],
dialyzer_codeserver:codeserver(),
dialyzer_plt:plt(),
- opaques_fun()) -> [dial_warning()].
+ opaques_fun()) -> [raw_warning()].
get_invalid_contract_warnings(Modules, CodeServer, Plt, FindOpaques) ->
get_invalid_contract_warnings_modules(Modules, CodeServer, Plt, FindOpaques, []).
@@ -609,12 +610,14 @@ get_invalid_contract_warnings_funs([{MFA, {FileLine, Contract, _Xtra}}|Left],
Sig = erl_types:t_fun(Args, Ret),
{M, _F, _A} = MFA,
Opaques = FindOpaques(M),
+ {File, Line} = FileLine,
+ WarningInfo = {File, Line, MFA},
NewAcc =
case check_contract(Contract, Sig, Opaques) of
{error, invalid_contract} ->
- [invalid_contract_warning(MFA, FileLine, Sig, RecDict)|Acc];
+ [invalid_contract_warning(MFA, WarningInfo, Sig, RecDict)|Acc];
{error, {overlapping_contract, []}} ->
- [overlapping_contract_warning(MFA, FileLine)|Acc];
+ [overlapping_contract_warning(MFA, WarningInfo)|Acc];
{error, {extra_range, ExtraRanges, STRange}} ->
Warn =
case t_from_forms_without_remote(Contract#contract.forms,
@@ -627,12 +630,12 @@ get_invalid_contract_warnings_funs([{MFA, {FileLine, Contract, _Xtra}}|Left],
end,
case Warn of
true ->
- [extra_range_warning(MFA, FileLine, ExtraRanges, STRange)|Acc];
+ [extra_range_warning(MFA, WarningInfo, ExtraRanges, STRange)|Acc];
false ->
Acc
end;
{error, Msg} ->
- [{?WARN_CONTRACT_SYNTAX, FileLine, Msg}|Acc];
+ [{?WARN_CONTRACT_SYNTAX, WarningInfo, Msg}|Acc];
ok ->
{M, F, A} = MFA,
CSig0 = get_contract_signature(Contract),
@@ -646,14 +649,14 @@ get_invalid_contract_warnings_funs([{MFA, {FileLine, Contract, _Xtra}}|Left],
BifSig = erl_types:t_fun(BifArgs, BifRet),
case check_contract(Contract, BifSig, Opaques) of
{error, _} ->
- [invalid_contract_warning(MFA, FileLine, BifSig, RecDict)
+ [invalid_contract_warning(MFA, WarningInfo, BifSig, RecDict)
|Acc];
ok ->
- picky_contract_check(CSig, BifSig, MFA, FileLine,
+ picky_contract_check(CSig, BifSig, MFA, WarningInfo,
Contract, RecDict, Acc)
end;
false ->
- picky_contract_check(CSig, Sig, MFA, FileLine, Contract,
+ picky_contract_check(CSig, Sig, MFA, WarningInfo, Contract,
RecDict, Acc)
end
end,
@@ -662,20 +665,20 @@ get_invalid_contract_warnings_funs([{MFA, {FileLine, Contract, _Xtra}}|Left],
get_invalid_contract_warnings_funs([], _Plt, _RecDict, _FindOpaques, Acc) ->
Acc.
-invalid_contract_warning({M, F, A}, FileLine, SuccType, RecDict) ->
+invalid_contract_warning({M, F, A}, WarningInfo, SuccType, RecDict) ->
SuccTypeStr = dialyzer_utils:format_sig(SuccType, RecDict),
- {?WARN_CONTRACT_TYPES, FileLine, {invalid_contract, [M, F, A, SuccTypeStr]}}.
+ {?WARN_CONTRACT_TYPES, WarningInfo, {invalid_contract, [M, F, A, SuccTypeStr]}}.
-overlapping_contract_warning({M, F, A}, FileLine) ->
- {?WARN_CONTRACT_TYPES, FileLine, {overlapping_contract, [M, F, A]}}.
+overlapping_contract_warning({M, F, A}, WarningInfo) ->
+ {?WARN_CONTRACT_TYPES, WarningInfo, {overlapping_contract, [M, F, A]}}.
-extra_range_warning({M, F, A}, FileLine, ExtraRanges, STRange) ->
+extra_range_warning({M, F, A}, WarningInfo, ExtraRanges, STRange) ->
ERangesStr = erl_types:t_to_string(ExtraRanges),
STRangeStr = erl_types:t_to_string(STRange),
- {?WARN_CONTRACT_SUPERTYPE, FileLine,
+ {?WARN_CONTRACT_SUPERTYPE, WarningInfo,
{extra_range, [M, F, A, ERangesStr, STRangeStr]}}.
-picky_contract_check(CSig0, Sig0, MFA, FileLine, Contract, RecDict, Acc) ->
+picky_contract_check(CSig0, Sig0, MFA, WarningInfo, Contract, RecDict, Acc) ->
CSig = erl_types:t_abstract_records(CSig0, RecDict),
Sig = erl_types:t_abstract_records(Sig0, RecDict),
case erl_types:t_is_equal(CSig, Sig) of
@@ -685,7 +688,7 @@ picky_contract_check(CSig0, Sig0, MFA, FileLine, Contract, RecDict, Acc) ->
erl_types:t_is_unit(erl_types:t_fun_range(CSig))) of
true -> Acc;
false ->
- case extra_contract_warning(MFA, FileLine, Contract,
+ case extra_contract_warning(MFA, WarningInfo, Contract,
CSig0, Sig0, RecDict) of
no_warning -> Acc;
{warning, Warning} -> [Warning|Acc]
@@ -693,7 +696,7 @@ picky_contract_check(CSig0, Sig0, MFA, FileLine, Contract, RecDict, Acc) ->
end
end.
-extra_contract_warning({M, F, A}, FileLine, Contract, CSig, Sig, RecDict) ->
+extra_contract_warning({M, F, A}, WarningInfo, Contract, CSig, Sig, RecDict) ->
%% We do not want to depend upon erl_types:t_to_string() possibly
%% hiding the contents of opaque types.
SigUnopaque = erl_types:t_unopaque(Sig),
@@ -724,7 +727,7 @@ extra_contract_warning({M, F, A}, FileLine, Contract, CSig, Sig, RecDict) ->
{?WARN_CONTRACT_NOT_EQUAL,
{contract_diff, [M, F, A, ContractString, SigString]}}
end,
- {warning, {Tag, FileLine, Msg}}
+ {warning, {Tag, WarningInfo, Msg}}
end.
is_remote_types_related(Contract, CSig, Sig, RecDict) ->
diff --git a/lib/dialyzer/src/dialyzer_dataflow.erl b/lib/dialyzer/src/dialyzer_dataflow.erl
index 46467a1303..ea1b09fcdd 100644
--- a/lib/dialyzer/src/dialyzer_dataflow.erl
+++ b/lib/dialyzer/src/dialyzer_dataflow.erl
@@ -28,14 +28,15 @@
-module(dialyzer_dataflow).
--export([get_fun_types/4, get_warnings/5, format_args/3]).
+-export([get_fun_types/5, get_warnings/5, format_args/3]).
%% Data structure interfaces.
-export([state__add_warning/2, state__cleanup/1,
state__duplicate/1, dispose_state/1,
state__get_callgraph/1, state__get_races/1,
state__get_records/1, state__put_callgraph/2,
- state__put_races/2, state__records_only/1]).
+ state__put_races/2, state__records_only/1,
+ state__find_function/2]).
-export_type([state/0]).
@@ -89,6 +90,8 @@
-type type() :: erl_types:erl_type().
-type types() :: erl_types:type_table().
+-type curr_fun() :: 'undefined' | 'top' | mfa_or_funlbl().
+
-define(no_arg, no_arg).
-define(TYPE_LIMIT, 3).
@@ -96,17 +99,20 @@
-define(BITS, 128).
-record(state, {callgraph :: dialyzer_callgraph:callgraph(),
+ codeserver :: dialyzer_codeserver:codeserver(),
envs :: env_tab(),
fun_tab :: fun_tab(),
+ fun_homes :: dict:dict(label(), mfa()),
plt :: dialyzer_plt:plt(),
opaques :: [type()],
races = dialyzer_races:new() :: dialyzer_races:races(),
records = dict:new() :: types(),
tree_map :: dict:dict(label(), cerl:cerl()),
warning_mode = false :: boolean(),
- warnings = [] :: [dial_warning()],
+ warnings = [] :: [raw_warning()],
work :: {[_], [_], sets:set()},
- module :: module()
+ module :: module(),
+ curr_fun :: curr_fun()
}).
-record(map, {dict = dict:new() :: type_tab(),
@@ -115,7 +121,6 @@
modified_stack = [] :: [{[Key :: term()],reference()}],
ref = undefined :: reference() | undefined}).
--type nowarn() :: dialyzer_analysis_callgraph:no_warn_unused().
-type env_tab() :: dict:dict(label(), #map{}).
-type fun_entry() :: {Args :: [type()], RetType :: type()}.
-type fun_tab() :: dict:dict('top' | label(),
@@ -133,22 +138,24 @@
-type fun_types() :: dict:dict(label(), type()).
-spec get_warnings(cerl:c_module(), dialyzer_plt:plt(),
- dialyzer_callgraph:callgraph(), types(), nowarn()) ->
- {[dial_warning()], fun_types()}.
-
-get_warnings(Tree, Plt, Callgraph, Records, NoWarnUnused) ->
- State1 = analyze_module(Tree, Plt, Callgraph, Records, true),
- State2 =
- state__renew_warnings(state__get_warnings(State1, NoWarnUnused), State1),
+ dialyzer_callgraph:callgraph(),
+ dialyzer_codeserver:codeserver(),
+ types()) ->
+ {[raw_warning()], fun_types()}.
+
+get_warnings(Tree, Plt, Callgraph, Codeserver, Records) ->
+ State1 = analyze_module(Tree, Plt, Callgraph, Codeserver, Records, true),
+ State2 = state__renew_warnings(state__get_warnings(State1), State1),
State3 = state__get_race_warnings(State2),
{State3#state.warnings, state__all_fun_types(State3)}.
-spec get_fun_types(cerl:c_module(), dialyzer_plt:plt(),
dialyzer_callgraph:callgraph(),
+ dialyzer_codeserver:codeserver(),
types()) -> fun_types().
-get_fun_types(Tree, Plt, Callgraph, Records) ->
- State = analyze_module(Tree, Plt, Callgraph, Records, false),
+get_fun_types(Tree, Plt, Callgraph, Codeserver, Records) ->
+ State = analyze_module(Tree, Plt, Callgraph, Codeserver, Records, false),
state__all_fun_types(State).
%%% ===========================================================================
@@ -157,11 +164,11 @@ get_fun_types(Tree, Plt, Callgraph, Records) ->
%%%
%%% ===========================================================================
-analyze_module(Tree, Plt, Callgraph, Records, GetWarnings) ->
+analyze_module(Tree, Plt, Callgraph, Codeserver, Records, GetWarnings) ->
debug_pp(Tree, false),
Module = cerl:atom_val(cerl:module_name(Tree)),
TopFun = cerl:ann_c_fun([{label, top}], [], Tree),
- State = state__new(Callgraph, TopFun, Plt, Module, Records),
+ State = state__new(Callgraph, Codeserver, TopFun, Plt, Module, Records),
State1 = state__race_analysis(not GetWarnings, State),
State2 = analyze_loop(State1),
case GetWarnings of
@@ -175,25 +182,26 @@ analyze_module(Tree, Plt, Callgraph, Records, GetWarnings) ->
analyze_loop(State) ->
case state__get_work(State) of
- none -> State;
- {Fun, NewState1} ->
+ none -> state__set_curr_fun(undefined, State);
+ {Fun, NewState0} ->
+ NewState1 = state__set_curr_fun(get_label(Fun), NewState0),
{ArgTypes, IsCalled} = state__get_args_and_status(Fun, NewState1),
case not IsCalled of
true ->
?debug("Not handling (not called) ~w: ~s\n",
- [state__lookup_name(get_label(Fun), State),
+ [NewState1#state.curr_fun,
t_to_string(t_product(ArgTypes))]),
analyze_loop(NewState1);
false ->
case state__fun_env(Fun, NewState1) of
none ->
?debug("Not handling (no env) ~w: ~s\n",
- [state__lookup_name(get_label(Fun), State),
+ [NewState1#state.curr_fun,
t_to_string(t_product(ArgTypes))]),
analyze_loop(NewState1);
Map ->
?debug("Handling fun ~p: ~s\n",
- [state__lookup_name(get_label(Fun), State),
+ [NewState1#state.curr_fun,
t_to_string(state__fun_type(Fun, NewState1))]),
Vars = cerl:fun_vars(Fun),
Map1 = enter_type_lists(Vars, ArgTypes, Map),
@@ -212,7 +220,7 @@ analyze_loop(State) ->
{NewState4, _Map2, BodyType} =
traverse(Body, Map1, NewState3),
?debug("Done analyzing: ~w:~s\n",
- [state__lookup_name(get_label(Fun), State),
+ [NewState1#state.curr_fun,
t_to_string(t_fun(ArgTypes, BodyType))]),
NewState5 =
case IsRaceAnalysisEnabled of
@@ -2780,9 +2788,9 @@ filter_match_fail([]) ->
%%%
%%% ===========================================================================
-state__new(Callgraph, Tree, Plt, Module, Records) ->
+state__new(Callgraph, Codeserver, Tree, Plt, Module, Records) ->
Opaques = erl_types:t_opaque_from_records(Records),
- TreeMap = build_tree_map(Tree),
+ {TreeMap, FunHomes} = build_tree_map(Tree, Callgraph),
Funs = dict:fetch_keys(TreeMap),
FunTab = init_fun_tab(Funs, dict:new(), TreeMap, Callgraph, Plt),
ExportedFuns =
@@ -2790,7 +2798,8 @@ state__new(Callgraph, Tree, Plt, Module, Records) ->
Work = init_work(ExportedFuns),
Env = lists:foldl(fun(Fun, Env) -> dict:store(Fun, map__new(), Env) end,
dict:new(), Funs),
- #state{callgraph = Callgraph, envs = Env, fun_tab = FunTab, opaques = Opaques,
+ #state{callgraph = Callgraph, codeserver = Codeserver,
+ envs = Env, fun_tab = FunTab, fun_homes = FunHomes, opaques = Opaques,
plt = Plt, races = dialyzer_races:new(), records = Records,
warning_mode = false, warnings = [], work = Work, tree_map = TreeMap,
module = Module}.
@@ -2829,7 +2838,7 @@ state__renew_race_list(RaceList, RaceListSize,
state__renew_warnings(Warnings, State) ->
State#state{warnings = Warnings}.
--spec state__add_warning(dial_warning(), state()) -> state().
+-spec state__add_warning(raw_warning(), state()) -> state().
state__add_warning(Warn, #state{warnings = Warnings} = State) ->
State#state{warnings = [Warn|Warnings]}.
@@ -2844,29 +2853,45 @@ state__add_warning(#state{warnings = Warnings, warning_mode = true} = State,
Ann = cerl:get_ann(Tree),
case Force of
true ->
- Warn = {Tag, {get_file(Ann), abs(get_line(Ann))}, Msg},
+ WarningInfo = {get_file(Ann),
+ abs(get_line(Ann)),
+ State#state.curr_fun},
+ Warn = {Tag, WarningInfo, Msg},
?debug("MSG ~s\n", [dialyzer:format_warning(Warn)]),
State#state{warnings = [Warn|Warnings]};
false ->
case is_compiler_generated(Ann) of
- true -> State;
- false ->
- Warn = {Tag, {get_file(Ann), get_line(Ann)}, Msg},
+ true -> State;
+ false ->
+ WarningInfo = {get_file(Ann), get_line(Ann), State#state.curr_fun},
+ Warn = {Tag, WarningInfo, Msg},
?debug("MSG ~s\n", [dialyzer:format_warning(Warn)]),
- State#state{warnings = [Warn|Warnings]}
+ State#state{warnings = [Warn|Warnings]}
end
end.
+-spec state__set_curr_fun(curr_fun(), state()) -> state().
+
+state__set_curr_fun(undefined, State) ->
+ State#state{curr_fun = undefined};
+state__set_curr_fun(FunLbl, State) ->
+ State#state{curr_fun = find_function(FunLbl, State)}.
+
+-spec state__find_function(mfa_or_funlbl(), state()) -> mfa_or_funlbl().
+
+state__find_function(FunLbl, State) ->
+ find_function(FunLbl, State).
+
state__get_race_warnings(#state{races = Races} = State) ->
{Races1, State1} = dialyzer_races:get_race_warnings(Races, State),
State1#state{races = Races1}.
state__get_warnings(#state{tree_map = TreeMap, fun_tab = FunTab,
- callgraph = Callgraph, plt = Plt} = State,
- NoWarnUnused) ->
+ callgraph = Callgraph, plt = Plt} = State) ->
FoldFun =
fun({top, _}, AccState) -> AccState;
({FunLbl, Fun}, AccState) ->
+ AccState1 = state__set_curr_fun(FunLbl, AccState),
{NotCalled, Ret} =
case dict:fetch(get_label(Fun), FunTab) of
{not_handled, {_Args0, Ret0}} -> {true, Ret0};
@@ -2874,17 +2899,12 @@ state__get_warnings(#state{tree_map = TreeMap, fun_tab = FunTab,
end,
case NotCalled of
true ->
- {Warn, Msg} =
- case dialyzer_callgraph:lookup_name(FunLbl, Callgraph) of
- error -> {false, {}};
- {ok, {_M, F, A} = MFA} ->
- {not sets:is_element(MFA, NoWarnUnused),
- {unused_fun, [F, A]}}
- end,
- case Warn of
- true -> state__add_warning(AccState, ?WARN_NOT_CALLED, Fun, Msg);
- false -> AccState
- end;
+ case dialyzer_callgraph:lookup_name(FunLbl, Callgraph) of
+ error -> AccState1;
+ {ok, {_M, F, A}} ->
+ Msg = {unused_fun, [F, A]},
+ state__add_warning(AccState1, ?WARN_NOT_CALLED, Fun, Msg)
+ end;
false ->
{Name, Contract} =
case dialyzer_callgraph:lookup_name(FunLbl, Callgraph) of
@@ -2897,7 +2917,7 @@ state__get_warnings(#state{tree_map = TreeMap, fun_tab = FunTab,
%% Check if the function has a contract that allows this.
Warn =
case Contract of
- none -> not parent_allows_this(FunLbl, State);
+ none -> not parent_allows_this(FunLbl, AccState1);
{value, C} ->
GenRet = dialyzer_contracts:get_contract_return(C),
not t_is_unit(GenRet)
@@ -2907,19 +2927,19 @@ state__get_warnings(#state{tree_map = TreeMap, fun_tab = FunTab,
case classify_returns(Fun) of
no_match ->
Msg = {no_return, [no_match|Name]},
- state__add_warning(AccState, ?WARN_RETURN_NO_RETURN,
+ state__add_warning(AccState1, ?WARN_RETURN_NO_RETURN,
Fun, Msg);
only_explicit ->
Msg = {no_return, [only_explicit|Name]},
- state__add_warning(AccState, ?WARN_RETURN_ONLY_EXIT,
+ state__add_warning(AccState1, ?WARN_RETURN_ONLY_EXIT,
Fun, Msg);
only_normal ->
Msg = {no_return, [only_normal|Name]},
- state__add_warning(AccState, ?WARN_RETURN_NO_RETURN,
+ state__add_warning(AccState1, ?WARN_RETURN_NO_RETURN,
Fun, Msg);
both ->
Msg = {no_return, [both|Name]},
- state__add_warning(AccState, ?WARN_RETURN_NO_RETURN,
+ state__add_warning(AccState1, ?WARN_RETURN_NO_RETURN,
Fun, Msg)
end;
false ->
@@ -2970,17 +2990,31 @@ state__get_args_and_status(Tree, #state{fun_tab = FunTab}) ->
{ok, {ArgTypes, _}} -> {ArgTypes, true}
end.
-build_tree_map(Tree) ->
+build_tree_map(Tree, Callgraph) ->
Fun =
- fun(T, Dict) ->
+ fun(T, {Dict, Homes, FunLbls} = Acc) ->
case cerl:is_c_fun(T) of
true ->
- dict:store(get_label(T), T, Dict);
+ FunLbl = get_label(T),
+ Dict1 = dict:store(FunLbl, T, Dict),
+ case catch dialyzer_callgraph:lookup_name(FunLbl, Callgraph) of
+ {ok, MFA} ->
+ F2 =
+ fun(Lbl, Dict0) ->
+ dict:store(Lbl, MFA, Dict0)
+ end,
+ Homes1 = lists:foldl(F2, Homes, [FunLbl|FunLbls]),
+ {Dict1, Homes1, []};
+ _ ->
+ {Dict1, Homes, [FunLbl|FunLbls]}
+ end;
false ->
- Dict
+ Acc
end
end,
- cerl_trees:fold(Fun, dict:new(), Tree).
+ Dict0 = dict:new(),
+ {Dict, Homes, _} = cerl_trees:fold(Fun, {Dict0, Dict0, []}, Tree),
+ {Dict, Homes}.
init_fun_tab([top|Left], Dict, TreeMap, Callgraph, Plt) ->
NewDict = dict:store(top, {[], t_none()}, Dict),
@@ -3438,6 +3472,13 @@ parent_allows_this(FunLbl, #state{callgraph = Callgraph, plt = Plt} =State) ->
end
end.
+find_function({_, _, _} = MFA, _State) ->
+ MFA;
+find_function(top, _State) ->
+ top;
+find_function(FunLbl, #state{fun_homes = Homes}) ->
+ dict:fetch(FunLbl, Homes).
+
classify_returns(Tree) ->
case find_terminals(cerl:fun_body(Tree)) of
{false, false} -> no_match;
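[Reviewer note, not part of the patch] fun_homes maps the label of every fun, including nested anonymous funs, to the MFA of the enclosing named function, and that is what find_function/2 consults when attributing warnings. A minimal sketch of the lookup under those assumptions:

    %% Mirrors find_function/2 above; labels are assumed to be integers.
    home_of({_M, _F, _A} = MFA, _Homes) -> MFA;   % already resolved
    home_of(top, _Homes) -> top;                  % module top level
    home_of(FunLbl, Homes) when is_integer(FunLbl) ->
        dict:fetch(FunLbl, Homes).                % e.g. 42 -> {mymod, start, 0}
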
diff --git a/lib/dialyzer/src/dialyzer_options.erl b/lib/dialyzer/src/dialyzer_options.erl
index a92b8b1958..20971f1407 100644
--- a/lib/dialyzer/src/dialyzer_options.erl
+++ b/lib/dialyzer/src/dialyzer_options.erl
@@ -28,7 +28,7 @@
-module(dialyzer_options).
--export([build/1]).
+-export([build/1, build_warnings/2]).
-include("dialyzer.hrl").
@@ -270,7 +270,7 @@ assert_solvers([v2|Terms]) ->
assert_solvers([Term|_]) ->
bad_option("Illegal value for solver", Term).
--spec build_warnings([atom()], [dial_warning()]) -> [dial_warning()].
+-spec build_warnings([atom()], dial_warn_tags()) -> dial_warn_tags().
build_warnings([Opt|Opts], Warnings) ->
NewWarnings =
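[Reviewer sketch, not from the patch] With the widened spec, build_warnings/2 also serves get_fun_meta_info/3 in dialyzer_utils: it maps the atoms of a -dialyzer attribute onto an updated ordset of warning tags. A hypothetical call (the tag atoms are placeholders; the real ones come from dialyzer.hrl):

    Tags0 = ordsets:from_list([some_warn_tag, another_warn_tag]),
    Tags1 = dialyzer_options:build_warnings([unmatched_returns, no_match], Tags0).
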
diff --git a/lib/dialyzer/src/dialyzer_races.erl b/lib/dialyzer/src/dialyzer_races.erl
index 2a8aba5d8f..48eb331239 100644
--- a/lib/dialyzer/src/dialyzer_races.erl
+++ b/lib/dialyzer/src/dialyzer_races.erl
@@ -85,9 +85,9 @@
-type race_tag() :: 'whereis_register' | 'whereis_unregister'
| 'ets_lookup_insert' | 'mnesia_dirty_read_write'.
-%% The following type is similar to the dial_warning() type but has a
+%% The following type is similar to the raw_warning() type but has a
%% tag which is local to this module and is not propagated to outside
--type dial_race_warning() :: {race_warn_tag(), file_line(), {atom(), [term()]}}.
+-type dial_race_warning() :: {race_warn_tag(), warning_info(), {atom(), [term()]}}.
-type race_warn_tag() :: ?WARN_WHEREIS_REGISTER | ?WARN_WHEREIS_UNREGISTER
| ?WARN_ETS_LOOKUP_INSERT | ?WARN_MNESIA_DIRTY_READ_WRITE.
@@ -312,10 +312,13 @@ race(State) ->
DepList = fixup_race_list(RaceWarnTag, VarArgs, State1),
{State2, RaceWarn} =
get_race_warn(Fun, Args, ArgTypes, DepList, State),
+ {File, Line} = FileLine,
+ CurrMFA = dialyzer_dataflow:state__find_function(CurrFun, State),
+ WarningInfo = {File, Line, CurrMFA},
race(
state__add_race_warning(
state__renew_race_tags(T, State2), RaceWarn, RaceWarnTag,
- FileLine))
+ WarningInfo))
end,
state__renew_race_tags([], RetState).
@@ -2324,7 +2327,7 @@ get_race_warnings_helper(Warnings, State) ->
[] ->
{dialyzer_dataflow:state__get_races(State), State};
[H|T] ->
- {RaceWarnTag, FileLine, {race_condition, [M, F, A, AT, S, DepList]}} = H,
+ {RaceWarnTag, WarningInfo, {race_condition, [M, F, A, AT, S, DepList]}} = H,
Reason =
case RaceWarnTag of
?WARN_WHEREIS_REGISTER ->
@@ -2347,7 +2350,7 @@ get_race_warnings_helper(Warnings, State) ->
"caused by its combination with ")
end,
W =
- {?WARN_RACE_CONDITION, FileLine,
+ {?WARN_RACE_CONDITION, WarningInfo,
{race_condition,
[M, F, dialyzer_dataflow:format_args(A, AT, S), Reason]}},
get_race_warnings_helper(T,
@@ -2377,12 +2380,12 @@ get_reason(DependencyList, Reason) ->
end
end.
-state__add_race_warning(State, RaceWarn, RaceWarnTag, FileLine) ->
+state__add_race_warning(State, RaceWarn, RaceWarnTag, WarningInfo) ->
case RaceWarn of
no_race -> State;
_Else ->
Races = dialyzer_dataflow:state__get_races(State),
- Warn = {RaceWarnTag, FileLine, RaceWarn},
+ Warn = {RaceWarnTag, WarningInfo, RaceWarn},
dialyzer_dataflow:state__put_races(add_race_warning(Warn, Races), State)
end.
diff --git a/lib/dialyzer/src/dialyzer_succ_typings.erl b/lib/dialyzer/src/dialyzer_succ_typings.erl
index 6dc4285194..7ceb19e30a 100644
--- a/lib/dialyzer/src/dialyzer_succ_typings.erl
+++ b/lib/dialyzer/src/dialyzer_succ_typings.erl
@@ -29,7 +29,7 @@
-export([analyze_callgraph/3,
analyze_callgraph/6,
- get_warnings/8
+ get_warnings/7
]).
-export([
@@ -69,10 +69,8 @@
-type scc() :: [mfa_or_funlbl()] | [module()].
-
-record(st, {callgraph :: dialyzer_callgraph:callgraph(),
codeserver :: dialyzer_codeserver:codeserver(),
- no_warn_unused :: sets:set(mfa()),
parent = none :: parent(),
timing_server :: dialyzer_timing:timing_server(),
solvers :: [solver()],
@@ -137,18 +135,17 @@ get_refined_success_typings(SCCs, #st{callgraph = Callgraph,
-type doc_plt() :: 'undefined' | dialyzer_plt:plt().
-spec get_warnings(dialyzer_callgraph:callgraph(), dialyzer_plt:plt(),
- doc_plt(), dialyzer_codeserver:codeserver(), sets:set(mfa()),
+ doc_plt(), dialyzer_codeserver:codeserver(),
dialyzer_timing:timing_server(), [solver()], pid()) ->
- {[dial_warning()], dialyzer_plt:plt(), doc_plt()}.
+ {[raw_warning()], dialyzer_plt:plt(), doc_plt()}.
get_warnings(Callgraph, Plt, DocPlt, Codeserver,
- NoWarnUnused, TimingServer, Solvers, Parent) ->
+ TimingServer, Solvers, Parent) ->
InitState =
init_state_and_get_success_typings(Callgraph, Plt, Codeserver,
TimingServer, Solvers, Parent),
- NewState = InitState#st{no_warn_unused = NoWarnUnused},
- Mods = dialyzer_callgraph:modules(NewState#st.callgraph),
- MiniPlt = NewState#st.plt,
+ Mods = dialyzer_callgraph:modules(InitState#st.callgraph),
+ MiniPlt = InitState#st.plt,
FindOpaques = lookup_and_find_opaques_fun(Codeserver),
CWarns =
dialyzer_contracts:get_invalid_contract_warnings(Mods, Codeserver,
@@ -156,31 +153,30 @@ get_warnings(Callgraph, Plt, DocPlt, Codeserver,
MiniDocPlt = dialyzer_plt:get_mini_plt(DocPlt),
ModWarns =
?timing(TimingServer, "warning",
- get_warnings_from_modules(Mods, NewState, MiniDocPlt)),
+ get_warnings_from_modules(Mods, InitState, MiniDocPlt)),
{postprocess_warnings(CWarns ++ ModWarns, Codeserver),
dialyzer_plt:restore_full_plt(MiniPlt, Plt),
dialyzer_plt:restore_full_plt(MiniDocPlt, DocPlt)}.
get_warnings_from_modules(Mods, State, DocPlt) ->
#st{callgraph = Callgraph, codeserver = Codeserver,
- no_warn_unused = NoWarnUnused, plt = Plt,
- timing_server = TimingServer} = State,
- Init = {Codeserver, Callgraph, NoWarnUnused, Plt, DocPlt},
+ plt = Plt, timing_server = TimingServer} = State,
+ Init = {Codeserver, Callgraph, Plt, DocPlt},
dialyzer_coordinator:parallel_job(warnings, Mods, Init, TimingServer).
--spec collect_warnings(module(), warnings_init_data()) -> [dial_warning()].
+-spec collect_warnings(module(), warnings_init_data()) -> [raw_warning()].
-collect_warnings(M, {Codeserver, Callgraph, NoWarnUnused, Plt, DocPlt}) ->
+collect_warnings(M, {Codeserver, Callgraph, Plt, DocPlt}) ->
ModCode = dialyzer_codeserver:lookup_mod_code(M, Codeserver),
Records = dialyzer_codeserver:lookup_mod_records(M, Codeserver),
Contracts = dialyzer_codeserver:lookup_mod_contracts(M, Codeserver),
AllFuns = collect_fun_info([ModCode]),
%% Check if there are contracts for functions that do not exist
- Warnings1 =
+ Warnings1 =
dialyzer_contracts:contracts_without_fun(Contracts, AllFuns, Callgraph),
{Warnings2, FunTypes} =
- dialyzer_dataflow:get_warnings(ModCode, Plt, Callgraph,
- Records, NoWarnUnused),
+ dialyzer_dataflow:get_warnings(ModCode, Plt, Callgraph, Codeserver,
+ Records),
Attrs = cerl:module_attrs(ModCode),
Warnings3 =
dialyzer_behaviours:check_callbacks(M, Attrs, Records, Plt, Codeserver),
@@ -197,17 +193,19 @@ postprocess_warnings(RawWarnings, Codeserver) ->
postprocess_dataflow_warns([], _Callgraph, WAcc, Acc) ->
lists:reverse(Acc, WAcc);
-postprocess_dataflow_warns([{?WARN_CONTRACT_RANGE, {CallF, CallL}, Msg}|Rest],
+postprocess_dataflow_warns([{?WARN_CONTRACT_RANGE, WarningInfo, Msg}|Rest],
Codeserver, WAcc, Acc) ->
+ {CallF, CallL, _CallMFA} = WarningInfo,
{contract_range, [Contract, M, F, A, ArgStrings, CRet]} = Msg,
case dialyzer_codeserver:lookup_mfa_contract({M,F,A}, Codeserver) of
- {ok, {{ContrF, _ContrL} = FileLine, _C, _X}} ->
+ {ok, {{ContrF, ContrL}, _C, _X}} ->
case CallF =:= ContrF of
true ->
NewMsg = {contract_range, [Contract, M, F, ArgStrings, CallL, CRet]},
- W = {?WARN_CONTRACT_RANGE, FileLine, NewMsg},
+ WarningInfo2 = {ContrF, ContrL, {M, F, A}},
+ W = {?WARN_CONTRACT_RANGE, WarningInfo2, NewMsg},
Filter =
- fun({?WARN_CONTRACT_TYPES, FL, _}) when FL =:= FileLine -> false;
+ fun({?WARN_CONTRACT_TYPES, WI, _}) when WI =:= WarningInfo2 -> false;
(_) -> true
end,
FilterWAcc = lists:filter(Filter, WAcc),
@@ -219,7 +217,7 @@ postprocess_dataflow_warns([{?WARN_CONTRACT_RANGE, {CallF, CallL}, Msg}|Rest],
%% The contract is not in a module that is currently under analysis.
%% We display the warning in the file/line of the call.
NewMsg = {contract_range, [Contract, M, F, ArgStrings, CallL, CRet]},
- W = {?WARN_CONTRACT_RANGE, {CallF, CallL}, NewMsg},
+ W = {?WARN_CONTRACT_RANGE, WarningInfo, NewMsg},
postprocess_dataflow_warns(Rest, Codeserver, WAcc, [W|Acc])
end.
@@ -262,7 +260,7 @@ refine_one_module(M, {CodeServer, Callgraph, Plt, _Solvers}) ->
Records = dialyzer_codeserver:lookup_mod_records(M, CodeServer),
FunTypes = get_fun_types_from_plt(AllFuns, Callgraph, Plt),
NewFunTypes =
- dialyzer_dataflow:get_fun_types(ModCode, Plt, Callgraph, Records),
+ dialyzer_dataflow:get_fun_types(ModCode, Plt, Callgraph, CodeServer, Records),
Contracts1 = dialyzer_codeserver:lookup_mod_contracts(M, CodeServer),
Contracts = orddict:from_list(dict:to_list(Contracts1)),
FindOpaques = find_opaques_fun(Records),
diff --git a/lib/dialyzer/src/dialyzer_typesig.erl b/lib/dialyzer/src/dialyzer_typesig.erl
index 3d03ed3ab3..217d238712 100644
--- a/lib/dialyzer/src/dialyzer_typesig.erl
+++ b/lib/dialyzer/src/dialyzer_typesig.erl
@@ -3275,7 +3275,7 @@ is_literal_record(Tree) ->
lists:member(record, Ann).
family(L) ->
- sofs:to_external(sofs:rel2fam(sofs:relation(L))).
+ dialyzer_utils:family(L).
%% ============================================================================
%%
diff --git a/lib/dialyzer/src/dialyzer_utils.erl b/lib/dialyzer/src/dialyzer_utils.erl
index e5f5c69d45..01ade00664 100644
--- a/lib/dialyzer/src/dialyzer_utils.erl
+++ b/lib/dialyzer/src/dialyzer_utils.erl
@@ -40,12 +40,16 @@
get_core_from_src/2,
get_record_and_type_info/1,
get_spec_info/3,
+ get_fun_meta_info/3,
+ is_suppressed_fun/2,
+ is_suppressed_tag/3,
merge_records/2,
pp_hook/0,
process_record_remote_types/1,
sets_filter/2,
src_compiler_opts/0,
- parallelism/0
+ parallelism/0,
+ family/1
]).
-include("dialyzer.hrl").
@@ -80,7 +84,9 @@ print_types1([{record, _Name} = Key|T], RecDict) ->
-type abstract_code() :: [tuple()]. %% XXX: import from somewhere
-type comp_options() :: [compile:option()].
--type mod_or_fname() :: atom() | file:filename().
+-type mod_or_fname() :: module() | file:filename().
+-type fa() :: {atom(), arity()}.
+-type codeserver() :: dialyzer_codeserver:codeserver().
%% ============================================================================
%%
@@ -300,7 +306,7 @@ type_record_fields([RecKey|Recs], RecDict) ->
{error, Name, Error}
end.
--spec process_record_remote_types(dialyzer_codeserver:codeserver()) -> dialyzer_codeserver:codeserver().
+-spec process_record_remote_types(codeserver()) -> codeserver().
process_record_remote_types(CServer) ->
TempRecords = dialyzer_codeserver:get_temp_records(CServer),
@@ -341,7 +347,7 @@ merge_records(NewRecords, OldRecords) ->
-type spec_dict() :: dict:dict().
-type callback_dict() :: dict:dict().
--spec get_spec_info(atom(), abstract_code(), dict:dict()) ->
+-spec get_spec_info(module(), abstract_code(), dict:dict()) ->
{'ok', spec_dict(), callback_dict()} | {'error', string()}.
get_spec_info(ModName, AbstractCode, RecordsDict) ->
@@ -359,13 +365,6 @@ get_optional_callbacks(Abs) ->
is_fa_list(O)],
lists:append(L).
-is_fa_list([{FuncName, Arity}|L])
- when is_atom(FuncName), is_integer(Arity), Arity >= 0 ->
- is_fa_list(L);
-is_fa_list([]) -> true;
-is_fa_list(_) -> false.
-
-
%% TypeSpec is a list of conditional contracts for a function.
%% Each contract is of the form {[Argument], Range, [Constraint]} where
%% - Argument and Range are in erl_types:erl_type() format and
@@ -422,6 +421,126 @@ get_spec_info([], SpecDict, CallbackDict,
_RecordsDict, _ModName, _OptCb, _File) ->
{ok, SpecDict, CallbackDict}.
+-spec get_fun_meta_info(module(), abstract_code(), [dial_warn_tag()]) ->
+ dialyzer_codeserver:fun_meta_info().
+
+get_fun_meta_info(M, Abs, LegalWarnings) ->
+ NoWarn = get_nowarn_unused_function(M, Abs),
+ FuncSupp = get_func_suppressions(M, Abs),
+ Warnings0 = get_options(Abs, LegalWarnings),
+ Warnings = ordsets:to_list(Warnings0),
+ ModuleWarnings = [{M, W} || W <- Warnings],
+ RawProps = lists:append([NoWarn, FuncSupp, ModuleWarnings]),
+ process_options(dialyzer_utils:family(RawProps), Warnings0).
+
+process_options([{M, _}=Mod|Left], Warnings) when is_atom(M) ->
+ [Mod|process_options(Left, Warnings)];
+process_options([{{_M, _F, _A}=MFA, Opts}|Left], Warnings) ->
+ WL = case lists:member(nowarn_function, Opts) of
+ true -> [{nowarn_function, func}]; % takes precedence
+ false ->
+ Ws = dialyzer_options:build_warnings(Opts, Warnings),
+ ModOnly = [{W, mod} || W <- ordsets:subtract(Warnings, Ws)],
+ FunOnly = [{W, func} || W <- ordsets:subtract(Ws, Warnings)],
+ ordsets:union(ModOnly, FunOnly)
+ end,
+ case WL of
+ [] -> process_options(Left, Warnings);
+ _ -> [{MFA, WL}|process_options(Left, Warnings)]
+ end;
+process_options([], _Warnings) -> [].
+
+-spec get_nowarn_unused_function(module(), abstract_code()) ->
+ [{mfa(), 'no_unused'}].
+
+get_nowarn_unused_function(M, Abs) ->
+ Opts = get_options_with_tag(compile, Abs),
+ Warn = erl_lint:bool_option(warn_unused_function, nowarn_unused_function,
+ true, Opts),
+ Functions = [{F, A} || {function, _, F, A, _} <- Abs],
+ AttrFile = collect_attribute(Abs, compile),
+ TagsFaList = check_fa_list(AttrFile, nowarn_unused_function, Functions),
+ FAs = case Warn of
+ false -> Functions;
+ true ->
+ [FA || {{nowarn_unused_function,_L,_File}, FA} <- TagsFaList]
+ end,
+ [{{M, F, A}, no_unused} || {F, A} <- FAs].
+
+-spec get_func_suppressions(module(), abstract_code()) ->
+ [{mfa(), 'nowarn_function' | dial_warn_tag()}].
+
+get_func_suppressions(M, Abs) ->
+ Functions = [{F, A} || {function, _, F, A, _} <- Abs],
+ AttrFile = collect_attribute(Abs, dialyzer),
+ TagsFAs = check_fa_list(AttrFile, '*', Functions),
+ %% Check the options:
+ Fun = fun({{nowarn_function, _L, _File}, _FA}) -> ok;
+ ({OptLFile, _FA}) ->
+ _ = get_options1([OptLFile], ordsets:new())
+ end,
+ lists:foreach(Fun, TagsFAs),
+ [{{M, F, A}, W} || {{W, _L, _File}, {F, A}} <- TagsFAs].
+
+-spec get_options(abstract_code(), [dial_warn_tag()]) ->
+ ordsets:ordset(dial_warn_tag()).
+
+get_options(Abs, LegalWarnings) ->
+ AttrFile = collect_attribute(Abs, dialyzer),
+ get_options1(AttrFile, LegalWarnings).
+
+get_options1([{Args, L, File}|Left], Warnings) ->
+ Opts = [O ||
+ O <- lists:flatten([Args]),
+ is_atom(O)],
+ try dialyzer_options:build_warnings(Opts, Warnings) of
+ NewWarnings ->
+ get_options1(Left, NewWarnings)
+ catch
+ throw:{dialyzer_options_error, Msg} ->
+ Msg1 = flat_format(" ~s:~w: ~s", [File, L, Msg]),
+ throw({error, Msg1})
+ end;
+get_options1([], Warnings) ->
+ Warnings.
+
+-type collected_attribute() ::
+ {Args :: term(), erl_scan:line(), file:filename()}.
+
+collect_attribute(Abs, Tag) ->
+ collect_attribute(Abs, Tag, "nofile").
+
+collect_attribute([{attribute, L, Tag, Args}|Left], Tag, File) ->
+ CollAttr = {Args, L, File},
+ [CollAttr | collect_attribute(Left, Tag, File)];
+collect_attribute([{attribute, _, file, {IncludeFile, _}}|Left], Tag, _) ->
+ collect_attribute(Left, Tag, IncludeFile);
+collect_attribute([_Other|Left], Tag, File) ->
+ collect_attribute(Left, Tag, File);
+collect_attribute([], _Tag, _File) -> [].
+
+-spec is_suppressed_fun(mfa(), codeserver()) -> boolean().
+
+is_suppressed_fun(MFA, CodeServer) ->
+ lookup_fun_property(MFA, nowarn_function, CodeServer).
+
+-spec is_suppressed_tag(mfa() | module(), dial_warn_tag(), codeserver()) ->
+ boolean().
+
+is_suppressed_tag(MorMFA, Tag, Codeserver) ->
+ not lookup_fun_property(MorMFA, Tag, Codeserver).
+
+lookup_fun_property({M, _F, _A}=MFA, Property, CodeServer) ->
+ MFAPropList = dialyzer_codeserver:lookup_meta_info(MFA, CodeServer),
+ case proplists:get_value(Property, MFAPropList, no) of
+ mod -> false; % suppressed in function
+ func -> true; % requested in function
+ no -> lookup_fun_property(M, Property, CodeServer)
+ end;
+lookup_fun_property(M, Property, CodeServer) when is_atom(M) ->
+ MPropList = dialyzer_codeserver:lookup_meta_info(M, CodeServer),
+ proplists:is_defined(Property, MPropList).
+
%% ============================================================================
%%
%% Exported types
@@ -503,6 +622,57 @@ format_sig(Type, RecDict) ->
flat_format(Fmt, Lst) ->
lists:flatten(io_lib:format(Fmt, Lst)).
+-spec get_options_with_tag(atom(), abstract_code()) -> [term()].
+
+get_options_with_tag(Tag, Abs) ->
+ lists:flatten([O || {attribute, _, Tag0, O} <- Abs, Tag =:= Tag0]).
+
+%% Check F/A, and collect (unchecked) warning tags with line and file.
+-spec check_fa_list([collected_attribute()], atom(), [fa()]) ->
+ [{{atom(), erl_scan:line(), file:filename()},fa()}].
+
+check_fa_list(AttrFile, Tag, Functions) ->
+ FuncTab = gb_sets:from_list(Functions),
+ check_fa_list1(AttrFile, Tag, FuncTab).
+
+check_fa_list1([{Args, L, File}|Left], Tag, Funcs) ->
+ TermsL = [{{Tag0, L, File}, Term} ||
+ {Tags, Terms0} <- lists:flatten([Args]),
+ Tag0 <- lists:flatten([Tags]),
+ Tag =:= '*' orelse Tag =:= Tag0,
+ Term <- lists:flatten([Terms0])],
+ case lists:dropwhile(fun({_, T}) -> is_fa(T) end, TermsL) of
+ [] -> ok;
+ [{_, Bad}|_] ->
+ Msg1 = flat_format(" Bad function ~w in line ~s:~w",
+ [Bad, File, L]),
+ throw({error, Msg1})
+ end,
+ case lists:dropwhile(fun({_, FA}) -> is_known(FA, Funcs) end, TermsL) of
+ [] -> ok;
+ [{_, {F, A}}|_] ->
+ Msg2 = flat_format(" Unknown function ~w/~w in line ~s:~w",
+ [F, A, File, L]),
+ throw({error, Msg2})
+ end,
+ TermsL ++ check_fa_list1(Left, Tag, Funcs);
+check_fa_list1([], _Tag, _Funcs) -> [].
+
+is_known(FA, Funcs) ->
+ gb_sets:is_element(FA, Funcs).
+
+-spec is_fa_list(term()) -> boolean().
+
+is_fa_list([E|L]) -> is_fa(E) andalso is_fa_list(L);
+is_fa_list([]) -> true;
+is_fa_list(_) -> false.
+
+-spec is_fa(term()) -> boolean().
+
+is_fa({FuncName, Arity})
+ when is_atom(FuncName), is_integer(Arity), Arity >= 0 -> true;
+is_fa(_) -> false.
+
%%-------------------------------------------------------------------
%% Author : Per Gustafsson <[email protected]>
%% Description : Provides better printing of binaries.
@@ -607,3 +777,8 @@ parallelism() ->
CPUs = erlang:system_info(logical_processors_available),
Schedulers = erlang:system_info(schedulers),
min(CPUs, Schedulers).
+
+-spec family([{K,V}]) -> [{K,[V]}].
+
+family(L) ->
+ sofs:to_external(sofs:rel2fam(sofs:relation(L))).
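[Reviewer sketch, not patch code] How the additions above fit together, with illustrative names only: a module containing

    -dialyzer({nowarn_function, f/1}).
    -dialyzer(unmatched_returns).

gives get_fun_meta_info(mymod, Abs, LegalWarnings) roughly

    [{{mymod, f, 1}, [{nowarn_function, func}]},
     {mymod, [WarnUnmatchedTag]}]    % the exact tag atom comes from dialyzer.hrl

and lookup_fun_property/3 then resolves a property per function, a function-level entry taking precedence over the module-level tag list:

    %% Sketch of the precedence; Property is nowarn_function or a dial_warn_tag() atom.
    has_property(Property, FunEntries, ModTags) ->
        case proplists:get_value(Property, FunEntries, no) of
            func -> true;                            % requested for this function
            mod  -> false;                           % suppressed for this function
            no   -> lists:member(Property, ModTags)  % fall back to the module level
        end.
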
diff --git a/lib/dialyzer/test/race_SUITE_data/src/ets_insert_args1_suppressed.erl b/lib/dialyzer/test/race_SUITE_data/src/ets_insert_args1_suppressed.erl
new file mode 100644
index 0000000000..5134cc6f0b
--- /dev/null
+++ b/lib/dialyzer/test/race_SUITE_data/src/ets_insert_args1_suppressed.erl
@@ -0,0 +1,19 @@
+%% This tests the presence of possible races due to an ets:lookup/ets:insert
+%% combination. It takes into account the argument types of the calls.
+
+-module(ets_insert_args1_suppressed).
+-export([start/0]).
+
+-dialyzer({nowarn_function,start/0}).
+
+start() ->
+ F = fun(T) -> [{_, N}] = ets:lookup(T, counter),
+ ets:insert(T, [{counter, N+1}])
+ end,
+ io:format("Created ~w\n", [ets:new(foo, [named_table, public])]),
+ ets:insert(foo, {counter, 0}),
+ io:format("Inserted ~w\n", [{counter, 0}]),
+ F(foo),
+ io:format("Update complete\n", []),
+ ObjectList = ets:lookup(foo, counter),
+ io:format("Counter: ~w\n", [ObjectList]).
diff --git a/lib/dialyzer/test/small_SUITE_data/results/blame_contract_range_suppressed b/lib/dialyzer/test/small_SUITE_data/results/blame_contract_range_suppressed
new file mode 100644
index 0000000000..40733434f6
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/results/blame_contract_range_suppressed
@@ -0,0 +1,2 @@
+
+blame_contract_range_suppressed.erl:8: Function foo/0 has no local return
diff --git a/lib/dialyzer/test/small_SUITE_data/results/request1 b/lib/dialyzer/test/small_SUITE_data/results/request1
new file mode 100644
index 0000000000..0cf4017403
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/results/request1
@@ -0,0 +1,2 @@
+
+request1.erl:8: Expression produces a value of type {'a','b'}, but this value is unmatched
diff --git a/lib/dialyzer/test/small_SUITE_data/results/suppress_request b/lib/dialyzer/test/small_SUITE_data/results/suppress_request
new file mode 100644
index 0000000000..18e82b7972
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/results/suppress_request
@@ -0,0 +1,6 @@
+
+suppress_request.erl:21: Expression produces a value of type {'a','b'}, but this value is unmatched
+suppress_request.erl:25: Expression produces a value of type {'a','b'}, but this value is unmatched
+suppress_request.erl:35: Function test3_b/0 has no local return
+suppress_request.erl:39: Guard test 2 =:= A::fun((none()) -> no_return()) can never succeed
+suppress_request.erl:7: Type specification suppress_request:test1('a' | 'b') -> 'ok' is a subtype of the success typing: suppress_request:test1('a' | 'b' | 'c') -> 'ok'
diff --git a/lib/dialyzer/test/small_SUITE_data/src/blame_contract_range_suppressed.erl b/lib/dialyzer/test/small_SUITE_data/src/blame_contract_range_suppressed.erl
new file mode 100644
index 0000000000..8b66d35083
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/blame_contract_range_suppressed.erl
@@ -0,0 +1,15 @@
+%%-----------------------------------------------------------------------
+%% Like ./blame_contract_range.erl, but warning is suppressed.
+%%-----------------------------------------------------------------------
+-module(blame_contract_range_suppressed).
+
+-export([foo/0]).
+
+foo() ->
+ bar(b).
+
+-dialyzer({nowarn_function, bar/1}).
+
+-spec bar(atom()) -> a.
+bar(a) -> a;
+bar(b) -> b.
diff --git a/lib/dialyzer/test/small_SUITE_data/src/maps_redef2.erl b/lib/dialyzer/test/small_SUITE_data/src/maps_redef2.erl
new file mode 100644
index 0000000000..945b2a9144
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/maps_redef2.erl
@@ -0,0 +1,23 @@
+%% In 17, the linter says that map(A) redefines 'type map', which is
+%% allowed until next release. However, Dialyzer used to replace
+%% map(A) with #{}, which resulted in warnings.
+
+-module(maps_redef2).
+
+-export([t/0]).
+
+-type map(_A) :: integer().
+
+t() ->
+ M = new(),
+ t1(M).
+
+-spec t1(map(_)) -> map(_).
+
+t1(A) ->
+ A + A.
+
+-spec new() -> map(_).
+
+new() ->
+ 3.
diff --git a/lib/dialyzer/test/small_SUITE_data/src/request1.erl b/lib/dialyzer/test/small_SUITE_data/src/request1.erl
new file mode 100644
index 0000000000..a6c4ab8dbd
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/request1.erl
@@ -0,0 +1,12 @@
+-module(request1).
+
+-export([a/0]).
+
+-dialyzer(unmatched_returns).
+
+a() ->
+ b(),
+ 1.
+
+b() ->
+ {a, b}.
diff --git a/lib/dialyzer/test/small_SUITE_data/src/suppress_request.erl b/lib/dialyzer/test/small_SUITE_data/src/suppress_request.erl
new file mode 100644
index 0000000000..c4275fa110
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/suppress_request.erl
@@ -0,0 +1,50 @@
+-module(suppress_request).
+
+-export([test1/1, test1_b/1, test2/0, test2_b/0,
+ test3/0, test3_b/0, test4/0, test4_b/0]).
+
+-dialyzer({[specdiffs], test1/1}).
+-spec test1(a | b) -> ok. % spec is subtype
+test1(A) ->
+ ok = test1_1(A).
+
+-spec test1_b(a | b) -> ok. % spec is subtype (suppressed by default)
+test1_b(A) ->
+ ok = test1_1(A).
+
+-spec test1_1(a | b | c) -> ok.
+test1_1(_) ->
+ ok.
+
+-dialyzer(unmatched_returns).
+test2() ->
+ tuple(), % unmatched
+ ok.
+
+test2_b() ->
+ tuple(), % unmatched
+ ok.
+
+-dialyzer({[no_return, no_match], [test3/0]}).
+test3() -> % no local return (suppressed)
+ A = fun(_) ->
+ 1
+ end,
+ A = 2. % can never succeed (suppressed)
+
+test3_b() -> % no local return (requested by default)
+ A = fun(_) ->
+ 1
+ end,
+ A = 2. % can never succeed (requested by default)
+
+-dialyzer(no_improper_lists).
+test4() ->
+ [1 | 2]. % improper list (suppressed)
+
+-dialyzer({no_improper_lists, test4_b/0}).
+test4_b() ->
+ [1 | 2]. % improper list (suppressed)
+
+tuple() ->
+ {a, b}.
diff --git a/lib/dialyzer/test/small_SUITE_data/src/suppression1.erl b/lib/dialyzer/test/small_SUITE_data/src/suppression1.erl
new file mode 100644
index 0000000000..00534704c3
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/suppression1.erl
@@ -0,0 +1,33 @@
+-module(suppression1).
+
+-export([a/1, b/1, c/0]).
+
+-dialyzer({nowarn_function, a/1}).
+
+-spec a(_) -> integer().
+
+a(_) ->
+ A = fun(_) ->
+ B = fun(_) ->
+ x = 7
+ end,
+ B = 1
+ end,
+ A.
+
+-spec b(_) -> integer().
+
+-dialyzer({nowarn_function, b/1}).
+
+b(_) ->
+ A = fun(_) ->
+ 1
+ end,
+ A = 2.
+
+-record(r, {a = a :: integer()}).
+
+-dialyzer({nowarn_function, c/0}).
+
+c() ->
+ #r{}.
diff --git a/lib/dialyzer/test/small_SUITE_data/src/suppression2.erl b/lib/dialyzer/test/small_SUITE_data/src/suppression2.erl
new file mode 100644
index 0000000000..4cba53fdce
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/suppression2.erl
@@ -0,0 +1,32 @@
+-module(suppression2).
+
+-export([a/1, b/1, c/0]).
+
+-dialyzer({nowarn_function, [a/1, b/1, c/0]}).
+-dialyzer([no_undefined_callbacks]).
+
+-behaviour(not_a_behaviour).
+
+-spec a(_) -> integer().
+
+a(_) ->
+ A = fun(_) ->
+ B = fun(_) ->
+ x = 7
+ end,
+ B = 1
+ end,
+ A.
+
+-spec b(_) -> integer().
+
+b(_) ->
+ A = fun(_) ->
+ 1
+ end,
+ A = 2.
+
+-record(r, {a = a :: integer()}).
+
+c() ->
+ #r{}.
diff --git a/lib/edoc/doc/overview.edoc b/lib/edoc/doc/overview.edoc
index 2af425272e..0ced8cab32 100644
--- a/lib/edoc/doc/overview.edoc
+++ b/lib/edoc/doc/overview.edoc
@@ -76,8 +76,6 @@ The following are the main functions for running EDoc:
<ul>
<li>{@link edoc:application/2}: Creates documentation for a
typical Erlang application.</li>
- <li>{@link edoc:packages/2}: Creates documentation for one or
- more packages, automatically locating source files.</li>
<li>{@link edoc:files/2}: Creates documentation for a
specified set of source files.</li>
<li>{@link edoc:run/3}: General interface function; the common
@@ -184,7 +182,7 @@ The following tags can be used anywhere within a module:
path (see {@link edoc:read_source/2}).</dd>
<dt><a name="gtag-todo">`@todo' (or `@TODO')</a></dt>
- <dd>Attaches a To-Do note to a function, module, package, or
+ <dd>Attaches a To-Do note to a function, module or
overview-page. The content can be any XHTML text describing
the issue, e.g.:
```%% @TODO Finish writing the documentation.'''
@@ -338,7 +336,7 @@ The following tags can be used before a module declaration:
<dt><a name="mtag-since">`@since'</a></dt>
<dd>Specifies when the module was introduced, with respect to
- the application, package, release or distribution it is part
+ the application, release or distribution it is part
of. The content can be arbitrary text.</dd>
<dt><a name="mtag-version">`@version'</a></dt>
@@ -445,7 +443,6 @@ possible formats for references are:
<table border="1" summary="reference syntax">
<tr><th>Reference syntax</th><th>Example</th><th>Scope</th></tr>
<tr><td>`Module'</td><td>{@link edoc_run}, `erl.lang.list'</td><td>Global</td></tr>
- <tr><td>`Package.*'</td><td>`erl.lang.*'</td><td>Global</td></tr>
<tr><td>`Function/Arity'</td><td>`file/2'</td><td>Within module</td></tr>
<tr><td>`Module:Function/Arity'</td><td>{@link edoc:application/2}</td><td>Global</td></tr>
<tr><td>`Type()'</td><td>`filename()'</td><td>Within module</td></tr>
@@ -531,7 +528,7 @@ after the empty line into separate paragraphs. For example:
```%% @doc This will all be part of the first paragraph.
%% It can stretch over several lines and contain <em>any
%% XHTML markup</em>.
- %%
+ %%
%% This is the second paragraph. The above line is
%% regarded as "empty" by EDoc, even though it ends with
%% a space.'''
@@ -685,17 +682,6 @@ information. User-defined macros override predefined macros.
<dd>Expands to the current date, as "<tt>Month Day Year</tt>",
e.g. "{@date}".</dd>
- <dt><a name="predefmacro-docRoot"><code>@{@docRoot}</code></a></dt>
- <dd>Expands to the relative URL path (such as
- `"../../.."') from the current page to the root
- directory of the generated documentation. This can be used to
- create XHTML references such as `<img
- src="@{@docRoot}/images/logo.jpeg">' that are independent of how
- deep down in a package structure they occur. If packages are not
- used (i.e., if all modules are in the "empty" package),
- <code>@{@docRoot}</code> will always resolve to the empty
- string.</dd>
-
<dt><a name="predefmacro-link"><code>@{@link <em>reference</em>.
<em>description</em>}</code></a></dt>
<dd>This creates a hypertext link; cf. the
@@ -710,9 +696,6 @@ information. User-defined macros override predefined macros.
<dd>Expands to the name of the current module. Only defined when a
module is being processed.</dd>
- <dt><a name="predefmacro-package"><code>@{@package}</code></a></dt>
- <dd>Expands to the name of the current package.</dd>
-
<dt><a name="predefmacro-section"><code>@{@section
<em>heading</em>}</code></a></dt>
<dd>Expands to a hypertext link to the specified section heading;
diff --git a/lib/edoc/include/edoc_doclet.hrl b/lib/edoc/include/edoc_doclet.hrl
index 60ec7f44e4..ac6763fb33 100644
--- a/lib/edoc/include/edoc_doclet.hrl
+++ b/lib/edoc/include/edoc_doclet.hrl
@@ -1,6 +1,6 @@
%% =====================================================================
%% Header file for EDoc doclet modules.
-%%
+%%
%% Copyright (C) 2001-2004 Richard Carlsson
%%
%% This library is free software; you can redistribute it and/or modify
@@ -43,16 +43,11 @@
%% @type doclet_gen() = #doclet_gen{sources = [string()],
%% app = no_app() | atom(),
-%% packages = [atom()],
-%% modules = [atom()],
-%% modules = [atom()],
-%% filemap = function()}
+%% modules = [atom()]}
-record(doclet_gen, {sources = [],
app = ?NO_APP,
- packages = [],
- modules = [],
- filemap
+ modules = []
}).
%% @type doclet_toc() = #doclet_gen{paths = [string()],
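[Reviewer note, not part of the patch] With the packages and filemap fields gone, a doclet command now carries only the remaining fields; a sketch with placeholder values (requires including edoc_doclet.hrl):

    Cmd = #doclet_gen{sources = [],          % normally the output of expand_sources/2
                      app = myapp,
                      modules = [mymod_a, mymod_b]}.
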
diff --git a/lib/edoc/priv/edoc.dtd b/lib/edoc/priv/edoc.dtd
index ba4ac0db28..4278a9e643 100644
--- a/lib/edoc/priv/edoc.dtd
+++ b/lib/edoc/priv/edoc.dtd
@@ -2,20 +2,13 @@
<!-- EDoc DTD Version 0.3 -->
<!ELEMENT overview (title, description?, author*, copyright?, version?,
- since?, see*, reference*, todo?, packages, modules)>
+ since?, see*, reference*, todo?, modules)>
<!ATTLIST overview
root CDATA #IMPLIED
encoding CDATA #IMPLIED>
<!ELEMENT title (#PCDATA)>
-<!ELEMENT package (description?, author*, copyright?, version?,
- since?, deprecated?, see*, reference*, todo?,
- modules)>
-<!ATTLIST package
- name CDATA #REQUIRED
- root CDATA #IMPLIED>
-
<!ELEMENT modules (module+)>
diff --git a/lib/edoc/priv/stylesheet.css b/lib/edoc/priv/stylesheet.css
index e426a90483..ab170c091f 100644
--- a/lib/edoc/priv/stylesheet.css
+++ b/lib/edoc/priv/stylesheet.css
@@ -27,10 +27,10 @@ div.spec {
margin-left: 2em;
background-color: #eeeeee;
}
-a.module,a.package {
+a.module {
text-decoration:none
}
-a.module:hover,a.package:hover {
+a.module:hover {
background-color: #eeeeee;
}
ul.definitions {
diff --git a/lib/edoc/src/edoc.erl b/lib/edoc/src/edoc.erl
index 983f04e8b6..78915e8943 100644
--- a/lib/edoc/src/edoc.erl
+++ b/lib/edoc/src/edoc.erl
@@ -24,12 +24,11 @@
%% TODO: option for ignoring functions matching some pattern ('..._test_'/0)
%% TODO: @private_type tag, opaque unless generating private docs?
%% TODO: document the record type syntax
-%% TODO: some 'skip' option for ignoring particular modules/packages?
-%% TODO: intermediate-level packages: document even if no local sources.
+%% TODO: some 'skip' option for ignoring particular modules?
%% TODO: multiline comment support (needs modified comment representation)
%% TODO: config-file for default settings
%% TODO: config: locations of all local docdirs; generate local doc-index page
-%% TODO: config: URL:s of offline packages/apps
+%% TODO: config: URL:s of offline apps
%% TODO: config: default stylesheet
%% TODO: config: default header/footer, etc.
%% TODO: offline linkage
@@ -45,10 +44,10 @@
-module(edoc).
--export([packages/1, packages/2, files/1, files/2,
+-export([files/1, files/2,
application/1, application/2, application/3,
toc/1, toc/2, toc/3,
- run/3,
+ run/2,
file/1, file/2,
read/1, read/2,
layout/1, layout/2,
@@ -68,15 +67,15 @@
file(Name) ->
file(Name, []).
-%% @spec file(filename(), proplist()) -> ok
+%% @spec file(filename(), proplist()) -> ok
%%
%% @type filename() = //kernel/file:filename()
%% @type proplist() = [term()]
%%
%% @deprecated This is part of the old interface to EDoc and is mainly
%% kept for backwards compatibility. The preferred way of generating
-%% documentation is through one of the functions {@link application/2},
-%% {@link packages/2} and {@link files/2}.
+%% documentation is through one of the functions {@link application/2}
+%% and {@link files/2}.
%%
%% @doc Reads a source code file and outputs formatted documentation to
%% a corresponding file.
@@ -121,44 +120,24 @@ file(Name, Options) ->
?DEFAULT_FILE_SUFFIX),
Dir = proplists:get_value(dir, Options, filename:dirname(Name)),
Encoding = [{encoding, edoc_lib:read_encoding(Name, [])}],
- edoc_lib:write_file(Text, Dir, BaseName ++ Suffix, '', Encoding).
+ edoc_lib:write_file(Text, Dir, BaseName ++ Suffix, Encoding).
-%% TODO: better documentation of files/1/2, packages/1/2, application/1/2/3
+%% TODO: better documentation of files/1/2, application/1/2/3
-%% @spec (Files::[filename() | {package(), [filename()]}]) -> ok
-%% @equiv packages(Packages, [])
+%% @spec (Files::[filename()]) -> ok
files(Files) ->
files(Files, []).
-%% @spec (Files::[filename() | {package(), [filename()]}],
+%% @spec (Files::[filename()],
%% Options::proplist()) -> ok
-%% @doc Runs EDoc on a given set of source files. See {@link run/3} for
+%% @doc Runs EDoc on a given set of source files. See {@link run/2} for
%% details, including options.
%% @equiv run([], Files, Options)
files(Files, Options) ->
- run([], Files, Options).
-
-%% @spec (Packages::[package()]) -> ok
-%% @equiv packages(Packages, [])
-
-packages(Packages) ->
- packages(Packages, []).
-
-%% @spec (Packages::[package()], Options::proplist()) -> ok
-%% @type package() = atom() | string()
-%%
-%% @doc Runs EDoc on a set of packages. The `source_path' option is used
-%% to locate the files; see {@link run/3} for details, including
-%% options. This function automatically appends the current directory to
-%% the source path.
-%%
-%% @equiv run(Packages, [], Options)
-
-packages(Packages, Options) ->
- run(Packages, [], Options ++ [{source_path, [?CURRENT_DIR]}]).
+ run(Files, Options).
%% @spec (Application::atom()) -> ok
%% @equiv application(Application, [])
@@ -194,7 +173,7 @@ application(App, Options) when is_atom(App) ->
%% subdirectory, if it exists, or otherwise in the application
%% directory itself.
%% </li>
-%% <li>The {@link run/3. `subpackages'} option is turned on. All found
+%% <li>The {@link run/2. `subpackages'} option is turned on. All found
%% source files will be processed.
%% </li>
%% <li>The `include' subdirectory is automatically added to the
@@ -203,7 +182,7 @@ application(App, Options) when is_atom(App) ->
%% </li>
%% </ul>
%%
-%% See {@link run/3} for details, including options.
+%% See {@link run/2} for details, including options.
%%
%% @see application/2
@@ -219,7 +198,7 @@ application(App, Dir, Options) when is_atom(App) ->
{includes, [filename:join(Dir, "include")]}],
Opts1 = set_app_default(App, Dir, Opts),
%% Recursively document all subpackages of '' - i.e., everything.
- run([''], [], [{application, App} | Opts1]).
+ run([], [{application, App} | Opts1]).
%% Try to set up a default application base URI in a smart way if the
%% user has not specified it explicitly.
@@ -240,31 +219,20 @@ set_app_default(App, Dir0, Opts) ->
Opts
end.
-%% If no source files are found for a (specified) package, no package
-%% documentation will be generated either (even if there is a
-%% package-documentation file). This is the way it should be. For
-%% specified files, use empty package (unless otherwise specified). The
-%% assumed package is always used for creating the output. If the actual
-%% module or package of the source differs from the assumption gathered
-%% from the path and file name, a warning should be issued (since links
-%% are likely to be incorrect).
-
opt_defaults() ->
- [packages].
+ [].
opt_negations() ->
[{no_preprocess, preprocess},
{no_subpackages, subpackages},
- {no_report_missing_types, report_missing_types},
- {no_packages, packages}].
+ {no_report_missing_types, report_missing_types}].
-%% @spec run(Packages::[package()],
-%% Files::[filename() | {package(), [filename()]}],
+%% @spec run(Files::[filename()],
%% Options::proplist()) -> ok
-%% @doc Runs EDoc on a given set of source files and/or packages. Note
+%% @doc Runs EDoc on a given set of source files. Note
%% that the doclet plugin module has its own particular options; see the
%% `doclet' option below.
-%%
+%%
%% Also see {@link layout/2} for layout-related options, and
%% {@link get_doc/2} for options related to reading source
%% files.
@@ -298,11 +266,6 @@ opt_negations() ->
%% The default doclet module is {@link edoc_doclet}; see {@link
%% edoc_doclet:run/2} for doclet-specific options.
%% </dd>
-%% <dt>{@type {exclude_packages, [package()]@}}
-%% </dt>
-%% <dd>Lists packages to be excluded from the documentation. Typically
-%% used in conjunction with the `subpackages' option.
-%% </dd>
%% <dt>{@type {file_suffix, string()@}}
%% </dt>
%% <dd>Specifies the suffix used for output files. The default value is
@@ -314,22 +277,6 @@ opt_negations() ->
%% target directory will be ignored and overwritten. The default
%% value is `false'.
%% </dd>
-%% <dt>{@type {packages, boolean()@}}
-%% </dt>
-%% <dd>If the value is `true', it it assumed that packages (module
-%% namespaces) are being used, and that the source code directory
-%% structure reflects this. The default value is `true'. (Usually,
-%% this does the right thing even if all the modules belong to the
-%% top-level "empty" package.) `no_packages' is an alias for
-%% `{packages, false}'. See the `subpackages' option below for
-%% further details.
-%%
-%% If the source code is organized in a hierarchy of
-%% subdirectories although it does not use packages, use
-%% `no_packages' together with the recursive-search `subpackages'
-%% option (on by default) to automatically generate documentation
-%% for all the modules.
-%% </dd>
%% <dt>{@type {source_path, [filename()]@}}
%% </dt>
%% <dd>Specifies a list of file system paths used to locate the source
@@ -345,7 +292,7 @@ opt_negations() ->
%% <dd>If the value is `true', all subpackages of specified packages
%% will also be included in the documentation. The default value is
%% `false'. `no_subpackages' is an alias for `{subpackages,
-%% false}'. See also the `exclude_packages' option.
+%% false}'.
%%
%% Subpackage source files are found by recursively searching
%% for source code files in subdirectories of the known source code
@@ -358,38 +305,31 @@ opt_negations() ->
%% </dl>
%%
%% @see files/2
-%% @see packages/2
%% @see application/2
%% NEW-OPTIONS: source_path, application
%% INHERIT-OPTIONS: init_context/1
%% INHERIT-OPTIONS: expand_sources/2
%% INHERIT-OPTIONS: target_dir_info/5
-%% INHERIT-OPTIONS: edoc_lib:find_sources/3
+%% INHERIT-OPTIONS: edoc_lib:find_sources/2
%% INHERIT-OPTIONS: edoc_lib:run_doclet/2
%% INHERIT-OPTIONS: edoc_lib:get_doc_env/4
-run(Packages, Files, Opts0) ->
+run(Files, Opts0) ->
Opts = expand_opts(Opts0),
Ctxt = init_context(Opts),
Dir = Ctxt#context.dir,
Path = proplists:append_values(source_path, Opts),
- Ss = sources(Path, Packages, Opts),
+ Ss = sources(Path, Opts),
{Ss1, Ms} = expand_sources(expand_files(Files) ++ Ss, Opts),
- Ps = [P || {_, P, _, _} <- Ss1],
App = proplists:get_value(application, Opts, ?NO_APP),
- {App1, Ps1, Ms1} = target_dir_info(Dir, App, Ps, Ms, Opts),
- %% The "empty package" is never included in the list of packages.
- Ps2 = edoc_lib:unique(lists:sort(Ps1)) -- [''],
+ {App1, Ms1} = target_dir_info(Dir, App, Ms, Opts),
Ms2 = edoc_lib:unique(lists:sort(Ms1)),
- Fs = package_files(Path, Ps2),
- Env = edoc_lib:get_doc_env(App1, Ps2, Ms2, Opts),
+ Env = edoc_lib:get_doc_env(App1, Ms2, Opts),
Ctxt1 = Ctxt#context{env = Env},
Cmd = #doclet_gen{sources = Ss1,
app = App1,
- packages = Ps2,
- modules = Ms2,
- filemap = Fs
+ modules = Ms2
},
F = fun (M) ->
M:run(Cmd, Ctxt1)
@@ -401,42 +341,22 @@ expand_opts(Opts0) ->
Opts0 ++ opt_defaults()).
%% NEW-OPTIONS: dir
-%% DEFER-OPTIONS: run/3
+%% DEFER-OPTIONS: run/2
init_context(Opts) ->
#context{dir = proplists:get_value(dir, Opts, ?CURRENT_DIR),
opts = Opts
}.
-%% INHERIT-OPTIONS: edoc_lib:find_sources/3
-
-sources(Path, Packages, Opts) ->
- lists:foldl(fun (P, Xs) ->
- edoc_lib:find_sources(Path, P, Opts) ++ Xs
- end,
- [], Packages).
-
-package_files(Path, Packages) ->
- Name = ?PACKAGE_FILE, % this is hard-coded for now
- D = lists:foldl(fun (P, D) ->
- F = edoc_lib:find_file(Path, P, Name),
- dict:store(P, F, D)
- end,
- dict:new(), Packages),
- fun (P) ->
- case dict:find(P, D) of
- {ok, F} -> F;
- error -> ""
- end
- end.
+%% INHERIT-OPTIONS: edoc_lib:find_sources/2
+
+sources(Path, Opts) ->
+ edoc_lib:find_sources(Path, Opts).
%% Expand user-specified sets of files.
-expand_files([{P, Fs1} | Fs]) ->
- [{P, filename:basename(F), filename:dirname(F)} || F <- Fs1]
- ++ expand_files(Fs);
expand_files([F | Fs]) ->
- [{'', filename:basename(F), filename:dirname(F)} |
+ [{filename:basename(F), filename:dirname(F)} |
expand_files(Fs)];
expand_files([]) ->
[].
@@ -444,26 +364,23 @@ expand_files([]) ->
%% Create the (assumed) full module names. Keep only the first source
%% for each module, but preserve the order of the list.
-%% NEW-OPTIONS: source_suffix, packages
-%% DEFER-OPTIONS: run/3
+%% NEW-OPTIONS: source_suffix
+%% DEFER-OPTIONS: run/2
expand_sources(Ss, Opts) ->
Suffix = proplists:get_value(source_suffix, Opts,
?DEFAULT_SOURCE_SUFFIX),
- Ss1 = case proplists:get_bool(packages, Opts) of
- true -> Ss;
- false -> [{'',F,D} || {_P,F,D} <- Ss]
- end,
+ Ss1 = [{F,D} || {F,D} <- Ss],
expand_sources(Ss1, Suffix, sets:new(), [], []).
-expand_sources([{'', F, D} | Fs], Suffix, S, As, Ms) ->
+expand_sources([{F, D} | Fs], Suffix, S, As, Ms) ->
M = list_to_atom(filename:rootname(F, Suffix)),
case sets:is_element(M, S) of
true ->
expand_sources(Fs, Suffix, S, As, Ms);
false ->
S1 = sets:add_element(M, S),
- expand_sources(Fs, Suffix, S1, [{M, '', F, D} | As],
+ expand_sources(Fs, Suffix, S1, [{M, F, D} | As],
[M | Ms])
end;
expand_sources([], _Suffix, _S, As, Ms) ->
@@ -471,16 +388,15 @@ expand_sources([], _Suffix, _S, As, Ms) ->
%% NEW-OPTIONS: new
-target_dir_info(Dir, App, Ps, Ms, Opts) ->
+target_dir_info(Dir, App, Ms, Opts) ->
case proplists:get_bool(new, Opts) of
true ->
- {App, Ps, Ms};
+ {App, Ms};
false ->
- {App1, Ps1, Ms1} = edoc_lib:read_info_file(Dir),
+ {App1, Ms1} = edoc_lib:read_info_file(Dir),
{if App == ?NO_APP -> App1;
true -> App
end,
- Ps ++ Ps1,
Ms ++ Ms1}
end.
@@ -510,7 +426,7 @@ toc(Dir, Opts) ->
toc(Dir, Paths, Opts0) ->
Opts = expand_opts(Opts0 ++ [{dir, Dir}]),
Ctxt = init_context(Opts),
- Env = edoc_lib:get_doc_env('', [], [], Opts),
+ Env = edoc_lib:get_doc_env('', [], Opts),
Ctxt1 = Ctxt#context{env = Env},
F = fun (M) ->
M:run(#doclet_toc{paths=Paths}, Ctxt1)
@@ -562,7 +478,7 @@ layout(Doc) ->
%% </dl>
%%
%% @see layout/1
-%% @see run/3
+%% @see run/2
%% @see read/2
%% @see file/2
@@ -856,7 +772,7 @@ get_doc(File) ->
%% edoc_lib:get_doc_env/4} for further options.
%%
%% @see get_doc/3
-%% @see run/3
+%% @see run/2
%% @see edoc_extract:source/5
%% @see read/2
%% @see layout/2
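%% Illustrative sketch (module names, paths and options invented): with
%% packages removed, EDoc is driven either by an explicit file list or by
%% a recursive search of an application's source tree, as exercised by the
%% new build_app test case later in this patch.

ex_edoc_usage() ->
    %% document two explicitly named source files into doc/
    ok = edoc:files(["src/foo.erl", "src/bar.erl"], [{dir, "doc"}]),
    %% document a whole application; nested source directories are
    %% included unless {subpackages, false} is passed
    ok = edoc:application(myapp, "/path/to/myapp", [{dir, "doc"}]).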
diff --git a/lib/edoc/src/edoc.hrl b/lib/edoc/src/edoc.hrl
index 44c5d6fef4..5b0fb68cf9 100644
--- a/lib/edoc/src/edoc.hrl
+++ b/lib/edoc/src/edoc.hrl
@@ -1,6 +1,6 @@
%% =====================================================================
%% Header file for EDoc
-%%
+%%
%% Copyright (C) 2001-2004 Richard Carlsson
%%
%% This library is free software; you can redistribute it and/or modify
@@ -25,9 +25,7 @@
-define(APPLICATION, edoc).
-define(INFO_FILE, "edoc-info").
--define(PACKAGE_FILE, "package.edoc").
-define(OVERVIEW_FILE, "overview.edoc").
--define(PACKAGE_SUMMARY, "package-summary").
-define(DEFAULT_SOURCE_SUFFIX, ".erl").
-define(DEFAULT_FILE_SUFFIX, ".html").
-define(DEFAULT_DOCLET, edoc_doclet).
@@ -65,13 +63,10 @@
%% Environment for generating documentation data
-record(env, {module = [],
- package = [],
root = "",
file_suffix,
- package_summary,
apps,
modules,
- packages,
app_default,
macros = [],
includes = []
diff --git a/lib/edoc/src/edoc_data.erl b/lib/edoc/src/edoc_data.erl
index eceb5cb1bd..b797d74a71 100644
--- a/lib/edoc/src/edoc_data.erl
+++ b/lib/edoc/src/edoc_data.erl
@@ -26,7 +26,7 @@
-module(edoc_data).
--export([module/4, package/4, overview/4, type/2]).
+-export([module/4, overview/4, type/2]).
-export([hidden_filter/2, get_all_tags/1]).
@@ -510,41 +510,14 @@ get_tags(_, []) -> [].
type(T, Env) ->
xmerl_lib:expand_element({type, [edoc_types:to_xml(T, Env)]}).
-%% <!ELEMENT package (description?, author*, copyright?, version?,
-%% since?, deprecated?, see*, reference*, todo?,
-%% modules)>
-%% <!ATTLIST package
-%% name CDATA #REQUIRED
-%% root CDATA #IMPLIED>
-%% <!ELEMENT modules (module+)>
-
-package(Package, Tags, Env, Opts) ->
- Env1 = Env#env{package = Package,
- root = edoc_refs:relative_package_path('', Package)},
- xmerl_lib:expand_element(package_1(Package, Tags, Env1, Opts)).
-
-package_1(Package, Tags, Env, Opts) ->
- {package, [{root, Env#env.root}],
- ([{packageName, [atom_to_list(Package)]}]
- ++ get_doc(Tags)
- ++ authors(Tags)
- ++ get_copyright(Tags)
- ++ get_version(Tags)
- ++ get_since(Tags)
- ++ get_deprecated(Tags)
- ++ sees(Tags, Env)
- ++ references(Tags)
- ++ todos(Tags, Opts))
- }.
-
%% <!ELEMENT overview (title, description?, author*, copyright?, version?,
-%% since?, see*, reference*, todo?, packages, modules)>
+%% since?, see*, reference*, todo?, modules)>
%% <!ATTLIST overview
%% root CDATA #IMPLIED>
%% <!ELEMENT title (#PCDATA)>
overview(Title, Tags, Env, Opts) ->
- Env1 = Env#env{package = '',
+ Env1 = Env#env{
root = ""},
xmerl_lib:expand_element(overview_1(Title, Tags, Env1, Opts)).
diff --git a/lib/edoc/src/edoc_doclet.erl b/lib/edoc/src/edoc_doclet.erl
index 5653b5894b..5961ca8cc0 100644
--- a/lib/edoc/src/edoc_doclet.erl
+++ b/lib/edoc/src/edoc_doclet.erl
@@ -42,9 +42,7 @@
-define(DEFAULT_FILE_SUFFIX, ".html").
-define(INDEX_FILE, "index.html").
-define(OVERVIEW_FILE, "overview.edoc").
--define(PACKAGE_SUMMARY, "package-summary.html").
-define(OVERVIEW_SUMMARY, "overview-summary.html").
--define(PACKAGES_FRAME, "packages-frame.html").
-define(MODULES_FRAME, "modules-frame.html").
-define(STYLESHEET, "stylesheet.css").
-define(IMAGE, "erlang.png").
@@ -52,11 +50,10 @@
-include_lib("xmerl/include/xmerl.hrl").
-%% Sources is the list of inputs in the order they were found. Packages
-%% and Modules are sorted lists of atoms without duplicates. (They
+%% Sources is the list of inputs in the order they were found.
+%% Modules is a sorted list of atoms without duplicates. (It will
%% usually include the data from the edoc-info file in the target
-%% directory, if it exists.) Note that the "empty package" is never
-%% included in Packages!
+%% directory, if it exists.)
%% @spec (Command::doclet_gen() | doclet_toc(), edoc_context()) -> ok
%% @doc Main doclet entry point. See the file <a
@@ -117,14 +114,12 @@
run(#doclet_gen{}=Cmd, Ctxt) ->
gen(Cmd#doclet_gen.sources,
Cmd#doclet_gen.app,
- Cmd#doclet_gen.packages,
Cmd#doclet_gen.modules,
- Cmd#doclet_gen.filemap,
Ctxt);
run(#doclet_toc{}=Cmd, Ctxt) ->
toc(Cmd#doclet_toc.paths, Ctxt).
-gen(Sources, App, Packages, Modules, FileMap, Ctxt) ->
+gen(Sources, App, Modules, Ctxt) ->
Dir = Ctxt#context.dir,
Env = Ctxt#context.env,
Options = Ctxt#context.opts,
@@ -132,11 +127,9 @@ gen(Sources, App, Packages, Modules, FileMap, Ctxt) ->
CSS = stylesheet(Options),
{Modules1, Error} = sources(Sources, Dir, Modules, Env, Options),
modules_frame(Dir, Modules1, Title, CSS),
- packages(Packages, Dir, FileMap, Env, Options),
- packages_frame(Dir, Packages, Title, CSS),
overview(Dir, Title, Env, Options),
- index_file(Dir, length(Packages) > 1, Title),
- edoc_lib:write_info_file(App, Packages, Modules1, Dir),
+ index_file(Dir, Title),
+ edoc_lib:write_info_file(App, Modules1, Dir),
copy_stylesheet(Dir, Options),
copy_image(Dir),
%% handle postponed error during processing of source files
@@ -182,19 +175,19 @@ sources(Sources, Dir, Modules, Env, Options) ->
%% set if it was successful. Errors are just flagged at this stage,
%% allowing all source files to be processed even if some of them fail.
-source({M, P, Name, Path}, Dir, Suffix, Env, Set, Private, Hidden,
+source({M, Name, Path}, Dir, Suffix, Env, Set, Private, Hidden,
Error, Options) ->
File = filename:join(Path, Name),
case catch {ok, edoc:get_doc(File, Env, Options)} of
{ok, {Module, Doc}} ->
- check_name(Module, M, P, File),
+ check_name(Module, M, File),
case ((not is_private(Doc)) orelse Private)
andalso ((not is_hidden(Doc)) orelse Hidden) of
true ->
Text = edoc:layout(Doc, Options),
Name1 = atom_to_list(M) ++ Suffix,
Encoding = [{encoding,encoding(Doc)}],
- edoc_lib:write_file(Text, Dir, Name1, P, Encoding),
+ edoc_lib:write_file(Text, Dir, Name1, Encoding),
{sets:add_element(Module, Set), Error};
false ->
{Set, Error}
@@ -204,8 +197,7 @@ source({M, P, Name, Path}, Dir, Suffix, Env, Set, Private, Hidden,
{Set, true}
end.
-check_name(M, M0, P0, File) ->
- P = '',
+check_name(M, M0, File) ->
N = M,
N0 = M0,
case N of
@@ -222,47 +214,12 @@ check_name(M, M0, P0, File) ->
ok
end
end,
- if P =/= P0 ->
- warning("file '~ts' belongs to package '~s', not '~s'.",
- [File, P, P0]);
- true ->
- ok
- end.
-
-
-%% Generating the summary files for packages.
-
-%% INHERIT-OPTIONS: read_file/4
-%% INHERIT-OPTIONS: edoc_lib:run_layout/2
-
-packages(Packages, Dir, FileMap, Env, Options) ->
- lists:foreach(fun (P) ->
- package(P, Dir, FileMap, Env, Options)
- end,
- Packages).
-
-package(P, Dir, FileMap, Env, Opts) ->
- Tags = case FileMap(P) of
- "" ->
- [];
- File ->
- read_file(File, package, Env, Opts)
- end,
- Data = edoc_data:package(P, Tags, Env, Opts),
- F = fun (M) ->
- M:package(Data, Opts)
- end,
- Text = edoc_lib:run_layout(F, Opts),
- edoc_lib:write_file(Text, Dir, ?PACKAGE_SUMMARY, P).
-
+ ok.
%% Creating an index file, with some frames optional.
%% TODO: get rid of frames, or change doctype to Frameset
-index_file(Dir, Packages, Title) ->
- Frame1 = {frame, [{src,?PACKAGES_FRAME},
- {name,"packagesFrame"},{title,""}],
- []},
+index_file(Dir, Title) ->
Frame2 = {frame, [{src,?MODULES_FRAME},
{name,"modulesFrame"},{title,""}],
[]},
@@ -270,16 +227,7 @@ index_file(Dir, Packages, Title) ->
{name,"overviewFrame"},{title,""}],
[]},
Frameset = {frameset, [{cols,"20%,80%"}],
- case Packages of
- true ->
- [?NL,
- {frameset, [{rows,"30%,70%"}],
- [?NL, Frame1, ?NL, Frame2, ?NL]}
- ];
- false ->
- [?NL, Frame2, ?NL]
- end
- ++ [?NL, Frame3, ?NL,
+ [?NL, Frame2, ?NL, ?NL, Frame3, ?NL,
{noframes,
[?NL,
{h2, ["This page uses frames"]},
@@ -296,24 +244,6 @@ index_file(Dir, Packages, Title) ->
Text = xmerl:export_simple([XML], xmerl_html, []),
edoc_lib:write_file(Text, Dir, ?INDEX_FILE).
-packages_frame(Dir, Ps, Title, CSS) ->
- Body = [?NL,
- {h2, [{class, "indextitle"}], ["Packages"]},
- ?NL,
- {table, [{width, "100%"}, {border, 0},
- {summary, "list of packages"}],
- lists:concat(
- [[?NL,
- {tr, [{td, [], [{a, [{href, package_ref(P)},
- {target,"overviewFrame"},
- {class, "package"}],
- [atom_to_list(P)]}]}]}]
- || P <- Ps])},
- ?NL],
- XML = xhtml(Title, CSS, Body),
- Text = xmerl:export_simple([XML], xmerl_html, []),
- edoc_lib:write_file(Text, Dir, ?PACKAGES_FRAME).
-
modules_frame(Dir, Ms, Title, CSS) ->
Body = [?NL,
{h2, [{class, "indextitle"}], ["Modules"]},
@@ -334,11 +264,7 @@ modules_frame(Dir, Ms, Title, CSS) ->
edoc_lib:write_file(Text, Dir, ?MODULES_FRAME).
module_ref(M) ->
- edoc_refs:relative_package_path(M, '') ++ ?DEFAULT_FILE_SUFFIX.
-
-package_ref(P) ->
- edoc_lib:join_uri(edoc_refs:relative_package_path(P, ''),
- ?PACKAGE_SUMMARY).
+ atom_to_list(M) ++ ?DEFAULT_FILE_SUFFIX.
xhtml(Title, CSS, Content) ->
xhtml_1(Title, CSS, {body, [{bgcolor, "white"}], Content}).
@@ -372,7 +298,7 @@ overview(Dir, Title, Env, Opts) ->
end,
Text = edoc_lib:run_layout(F, Opts),
EncOpts = [{encoding,Encoding}],
- edoc_lib:write_file(Text, Dir, ?OVERVIEW_SUMMARY, '', EncOpts).
+ edoc_lib:write_file(Text, Dir, ?OVERVIEW_SUMMARY, EncOpts).
copy_image(Dir) ->
case code:priv_dir(?EDOC_APP) of
@@ -505,7 +431,7 @@ app_index_file(Paths, Dir, Env, Options) ->
% Priv = proplists:get_bool(private, Options),
CSS = stylesheet(Options),
Apps1 = [{filename:dirname(A),filename:basename(A)} || A <- Paths],
- index_file(Dir, false, Title),
+ index_file(Dir, Title),
application_frame(Dir, Apps1, Title, CSS),
modules_frame(Dir, [], Title, CSS),
overview(Dir, Title, Env, Options),
diff --git a/lib/edoc/src/edoc_extract.erl b/lib/edoc/src/edoc_extract.erl
index 67a95e80aa..6d34de3a85 100644
--- a/lib/edoc/src/edoc_extract.erl
+++ b/lib/edoc/src/edoc_extract.erl
@@ -121,10 +121,8 @@ source1(Tree, File0, Env, Opts, TypeDocs) ->
Module = get_module_info(Tree, File),
{Header, Footer, Entries} = collect(Forms, Module),
Name = Module#module.name,
- Package = '',
Env1 = Env#env{module = Name,
- package = Package,
- root = edoc_refs:relative_package_path('', Package)},
+ root = ""},
Env2 = add_macro_defs(module_macros(Env1), Opts, Env1),
Entries1 = get_tags([Header, Footer | Entries], Env2, File, TypeDocs),
Entries2 = edoc_specs:add_data(Entries1, Opts, File, Module),
@@ -218,7 +216,7 @@ add_macro_defs(Defs0, Opts, Env) ->
%% @spec file(File::filename(), Context, Env::edoc_env(),
%% Options::proplist()) -> {ok, Tags} | {error, Reason}
-%% Context = overview | package
+%% Context = overview
%% Tags = [term()]
%% Reason = term()
%%
@@ -249,7 +247,7 @@ file(File, Context, Env, Opts) ->
%% @spec (Text::string(), Context, Env::edoc_env(),
%% Options::proplist()) -> Tags
-%% Context = overview | package
+%% Context = overview
%% Tags = [term()]
%%
%% @doc Returns the list of tags in the text. Any lines of text before
@@ -353,8 +351,6 @@ preprocess_forms_2(F, Fs) ->
[F | preprocess_forms_1(Fs)];
{function, _} ->
[F | preprocess_forms_1(Fs)];
- {rule, _} ->
- [F | preprocess_forms_1(Fs)];
{attribute, {module, _}} ->
[F | preprocess_forms_1(Fs)];
text ->
@@ -392,15 +388,6 @@ collect([F | Fs], Cs, Ss, Ts, As, Header, Mod) ->
export = Export,
data = {comment_text(Cs),Ss,Ts}} | As],
Header, Mod);
- {rule, Name} ->
- L = erl_syntax:get_pos(F),
- Export = ordsets:is_element(Name, Mod#module.exports),
- Args = parameters(erl_syntax:rule_clauses(F)),
- collect(Fs, [], [], [],
- [#entry{name = Name, args = Args, line = L,
- export = Export,
- data = {comment_text(Cs),Ss,Ts}} | As],
- Header, Mod);
{attribute, {module, _}} when Header =:= undefined ->
L = erl_syntax:get_pos(F),
collect(Fs, [], [], [], As,
diff --git a/lib/edoc/src/edoc_layout.erl b/lib/edoc/src/edoc_layout.erl
index 36d067d9bc..6309e88475 100644
--- a/lib/edoc/src/edoc_layout.erl
+++ b/lib/edoc/src/edoc_layout.erl
@@ -27,7 +27,7 @@
-module(edoc_layout).
--export([module/2, package/2, overview/2, type/1]).
+-export([module/2, overview/2, type/1]).
-import(edoc_report, [report/2]).
@@ -978,9 +978,6 @@ get_text(Name, Es) ->
local_label(R) ->
"#" ++ R.
-xhtml(Title, CSS, Body) ->
- xhtml(Title, CSS, Body, "latin1").
-
xhtml(Title, CSS, Body, Encoding) ->
EncString = case Encoding of
"latin1" -> "ISO-8859-1";
@@ -1010,27 +1007,6 @@ type(E, Ds) ->
xmerl:export_simple_content(t_utype_elem(E) ++ local_defs(Ds, Opts),
?HTML_EXPORT).
-package(E=#xmlElement{name = package, content = Es}, Options) ->
- Opts = init_opts(E, Options),
- Name = get_text(packageName, Es),
- Title = ["Package ", Name],
- Desc = get_content(description, Es),
-% ShortDesc = get_content(briefDescription, Desc),
- FullDesc = get_content(fullDescription, Desc),
- Body = ([?NL, {h1, [Title]}, ?NL]
-% ++ ShortDesc
- ++ copyright(Es)
- ++ deprecated(Es, "package")
- ++ version(Es)
- ++ since(Es)
- ++ authors(Es)
- ++ references(Es)
- ++ sees(Es)
- ++ todos(Es)
- ++ FullDesc),
- XML = xhtml(Title, stylesheet(Opts), Body),
- xmerl:export_simple(XML, ?HTML_EXPORT, []).
-
overview(E=#xmlElement{name = overview, content = Es}, Options) ->
Opts = init_opts(E, Options),
Title = [get_text(title, Es)],
diff --git a/lib/edoc/src/edoc_lib.erl b/lib/edoc/src/edoc_lib.erl
index c46338a2e1..813fcf2476 100644
--- a/lib/edoc/src/edoc_lib.erl
+++ b/lib/edoc/src/edoc_lib.erl
@@ -29,9 +29,9 @@
get_first_sentence/1, is_space/1, strip_space/1, parse_expr/2,
parse_contact/2, escape_uri/1, join_uri/2, is_relative_uri/1,
is_name/1, to_label/1, find_doc_dirs/0, find_sources/2,
- find_sources/3, find_file/3, try_subdir/2, unique/1,
- write_file/3, write_file/4, write_file/5, write_info_file/4,
- read_info_file/1, get_doc_env/1, get_doc_env/4, copy_file/2,
+ find_file/2, try_subdir/2, unique/1,
+ write_file/3, write_file/4, write_info_file/3,
+ read_info_file/1, get_doc_env/1, get_doc_env/3, copy_file/2,
uri_get/1, run_doclet/2, run_layout/2,
simplify_path/1, timestr/1, datestr/1, read_encoding/2]).
@@ -266,13 +266,6 @@ is_name_1([$_ | Cs]) ->
is_name_1([]) -> true;
is_name_1(_) -> false.
-to_atom(A) when is_atom(A) -> A;
-to_atom(S) when is_list(S) -> list_to_atom(S).
-
-to_list(A) when is_atom(A) -> atom_to_list(A);
-to_list(S) when is_list(S) -> S.
-
-
%% @private
unique([X | Xs]) -> [X | unique(Xs, X)];
unique([]) -> [].
@@ -674,7 +667,7 @@ simplify_path(P) ->
try_subdir(Dir, Subdir) ->
D = filename:join(Dir, Subdir),
case filelib:is_dir(D) of
- true -> D;
+ true -> D;
false -> Dir
end.
@@ -686,19 +679,10 @@ try_subdir(Dir, Subdir) ->
%% @private
write_file(Text, Dir, Name) ->
- write_file(Text, Dir, Name, '').
-
-%% @spec (Text::deep_string(), Dir::edoc:filename(),
-%% Name::edoc:filename(), Package::atom()|string()) -> ok
-%% @doc Like {@link write_file/3}, but adds path components to the target
-%% directory corresponding to the specified package.
-%% @private
+ write_file(Text, Dir, Name, [{encoding,latin1}]).
-write_file(Text, Dir, Name, Package) ->
- write_file(Text, Dir, Name, Package, [{encoding,latin1}]).
-
-write_file(Text, Dir, Name, Package, Options) ->
- File = filename:join([Dir, to_list(Package), Name]),
+write_file(Text, Dir, Name, Options) ->
+ File = filename:join([Dir, Name]),
ok = filelib:ensure_dir(File),
case file:open(File, [write] ++ Options) of
{ok, FD} ->
@@ -711,15 +695,14 @@ write_file(Text, Dir, Name, Package, Options) ->
end.
%% @private
-write_info_file(App, Packages, Modules, Dir) ->
- Ts = [{packages, Packages},
- {modules, Modules}],
+write_info_file(App, Modules, Dir) ->
+ Ts = [{modules, Modules}],
Ts1 = if App =:= ?NO_APP -> Ts;
true -> [{application, App} | Ts]
end,
S0 = [io_lib:fwrite("~p.\n", [T]) || T <- Ts1],
S = ["%% encoding: UTF-8\n" | S0],
- write_file(S, Dir, ?INFO_FILE, '', [{encoding,unicode}]).
+ write_file(S, Dir, ?INFO_FILE, [{encoding,unicode}]).
%% @spec (Name::edoc:filename()) -> {ok, string()} | {error, Reason}
%%
@@ -744,9 +727,8 @@ read_file(File) ->
info_file_data(Ts) ->
App = proplists:get_value(application, Ts, ?NO_APP),
- Ps = proplists:append_values(packages, Ts),
Ms = proplists:append_values(modules, Ts),
- {App, Ps, Ms}.
+ {App, Ms}.
%% Local file access - don't complain if file does not exist.
@@ -761,10 +743,10 @@ read_info_file(Dir) ->
{error, R} ->
R1 = file:format_error(R),
warning("could not read '~ts': ~ts.", [File, R1]),
- {?NO_APP, [], []}
- end;
+ {?NO_APP, []}
+ end;
false ->
- {?NO_APP, [], []}
+ {?NO_APP, []}
end.
%% URI access
@@ -776,7 +758,7 @@ uri_get_info_file(Base) ->
parse_info_file(Text, URI);
{error, Msg} ->
warning("could not read '~ts': ~ts.", [URI, Msg]),
- {?NO_APP, [], []}
+ {?NO_APP, []}
end.
parse_info_file(Text, Name) ->
@@ -785,10 +767,10 @@ parse_info_file(Text, Name) ->
info_file_data(Vs);
{error, eof} ->
warning("unexpected end of file in '~ts'.", [Name]),
- {?NO_APP, [], []};
+ {?NO_APP, []};
{error, {_Line,Module,R}} ->
warning("~ts: ~ts.", [Module:format_error(R), Name]),
- {?NO_APP, [], []}
+ {?NO_APP, []}
end.
parse_terms(Text) ->
@@ -815,82 +797,67 @@ parse_terms_1([], _As, _Vs) ->
%% ---------------------------------------------------------------------
-%% Source files and packages
-
-%% @private
-find_sources(Path, Opts) ->
- find_sources(Path, "", Opts).
+%% Source files
-%% @doc See {@link edoc:run/3} for a description of the options
-%% `subpackages', `source_suffix' and `exclude_packages'.
+%% @doc See {@link edoc:run/2} for a description of the options
+%% `subpackages' and `source_suffix'.
%% @private
-%% NEW-OPTIONS: subpackages, source_suffix, exclude_packages
-%% DEFER-OPTIONS: edoc:run/3
+%% NEW-OPTIONS: subpackages, source_suffix
+%% DEFER-OPTIONS: edoc:run/2
-find_sources(Path, Pkg, Opts) ->
+find_sources(Path, Opts) ->
Rec = proplists:get_bool(subpackages, Opts),
Ext = proplists:get_value(source_suffix, Opts, ?DEFAULT_SOURCE_SUFFIX),
- find_sources(Path, Pkg, Rec, Ext, Opts).
+ find_sources(Path, Rec, Ext, Opts).
-find_sources(Path, Pkg, Rec, Ext, Opts) ->
- Skip = proplists:get_value(exclude_packages, Opts, []),
- lists:flatten(find_sources_1(Path, to_atom(Pkg), Rec, Ext, Skip)).
+find_sources(Path, Rec, Ext, _Opts) ->
+ lists:flatten(find_sources_1(Path, Rec, Ext)).
-find_sources_1([P | Ps], Pkg, Rec, Ext, Skip) ->
- Dir = filename:join(P, atom_to_list(Pkg)),
- Fs1 = find_sources_1(Ps, Pkg, Rec, Ext, Skip),
+find_sources_1([P | Ps], Rec, Ext) ->
+ Dir = P,
+ Fs1 = find_sources_1(Ps, Rec, Ext),
case filelib:is_dir(Dir) of
true ->
- [find_sources_2(Dir, Pkg, Rec, Ext, Skip) | Fs1];
+ [find_sources_2(Dir, Rec, Ext) | Fs1];
false ->
Fs1
end;
-find_sources_1([], _Pkg, _Rec, _Ext, _Skip) ->
+find_sources_1([], _Rec, _Ext) ->
[].
-find_sources_2(Dir, Pkg, Rec, Ext, Skip) ->
- case lists:member(Pkg, Skip) of
- false ->
- Es = list_dir(Dir, false), % just warn if listing fails
- Es1 = [{Pkg, E, Dir} || E <- Es, is_source_file(E, Ext)],
- case Rec of
+find_sources_2(Dir, Rec, Ext) ->
+ Es = list_dir(Dir, false), % just warn if listing fails
+ Es1 = [{E, Dir} || E <- Es, is_source_file(E, Ext)],
+ case Rec of
true ->
- [find_sources_3(Es, Dir, Pkg, Rec, Ext, Skip) | Es1];
+ [find_sources_3(Es, Dir, Rec, Ext) | Es1];
false ->
- Es1
- end;
- true ->
- []
- end.
+ Es1
+ end.
-find_sources_3(Es, Dir, Pkg, Rec, Ext, Skip) ->
+find_sources_3(Es, Dir, Rec, Ext) ->
[find_sources_2(filename:join(Dir, E),
- to_atom(join(Pkg, E)), Rec, Ext, Skip)
- || E <- Es, is_package_dir(E, Dir)].
-
-join('', E) -> E;
-join(Pkg, E) -> filename:join(Pkg, E).
+ Rec, Ext)
+ || E <- Es, is_source_dir(E, Dir)].
is_source_file(Name, Ext) ->
(filename:extension(Name) == Ext)
andalso is_name(filename:rootname(Name, Ext)).
-is_package_dir(Name, Dir) ->
- is_name(filename:rootname(filename:basename(Name)))
- andalso filelib:is_dir(filename:join(Dir, Name)).
+is_source_dir(Name, Dir) ->
+ filelib:is_dir(filename:join(Dir, Name)).
%% @private
-find_file([P | Ps], []=Pkg, Name) ->
- Pkg = [],
+find_file([P | Ps], Name) ->
File = filename:join(P, Name),
case filelib:is_file(File) of
true ->
- File;
+ File;
false ->
- find_file(Ps, Pkg, Name)
- end;
-find_file([], [], _Name) ->
+ find_file(Ps, Name)
+ end;
+find_file([], _Name) ->
"".
%% @private
@@ -909,7 +876,7 @@ find_doc_dirs([P0 | Ps]) ->
File = filename:join(Dir, ?INFO_FILE),
case filelib:is_file(File) of
true ->
- [Dir | find_doc_dirs(Ps)];
+ [Dir | find_doc_dirs(Ps)];
false ->
find_doc_dirs(Ps)
end;
@@ -923,22 +890,21 @@ find_doc_dirs([]) ->
%% NEW-OPTIONS: doc_path
%% DEFER-OPTIONS: get_doc_env/4
-get_doc_links(App, Packages, Modules, Opts) ->
+get_doc_links(App, Modules, Opts) ->
Path = proplists:append_values(doc_path, Opts) ++ find_doc_dirs(),
Ds = [{P, uri_get_info_file(P)} || P <- Path],
- Ds1 = [{"", {App, Packages, Modules}} | Ds],
+ Ds1 = [{"", {App, Modules}} | Ds],
D = dict:new(),
- make_links(Ds1, D, D, D).
+ make_links(Ds1, D, D).
-make_links([{Dir, {App, Ps, Ms}} | Ds], A, P, M) ->
+make_links([{Dir, {App, Ms}} | Ds], A, M) ->
A1 = if App == ?NO_APP -> A;
true -> add_new(App, Dir, A)
end,
F = fun (K, D) -> add_new(K, Dir, D) end,
- P1 = lists:foldl(F, P, Ps),
M1 = lists:foldl(F, M, Ms),
- make_links(Ds, A1, P1, M1);
-make_links([], A, P, M) ->
+ make_links(Ds, A1, M1);
+make_links([], A, M) ->
F = fun (D) ->
fun (K) ->
case dict:find(K, D) of
@@ -947,7 +913,7 @@ make_links([], A, P, M) ->
end
end
end,
- {F(A), F(P), F(M)}.
+ {F(A), F(M)}.
add_new(K, V, D) ->
case dict:is_key(K, D) of
@@ -962,11 +928,10 @@ add_new(K, V, D) ->
%% @private
get_doc_env(Opts) ->
- get_doc_env([], [], [], Opts).
+ get_doc_env([], [], Opts).
-%% @spec (App, Packages, Modules, Options::proplist()) -> edoc_env()
+%% @spec (App, Modules, Options::proplist()) -> edoc_env()
%% App = [] | atom()
-%% Packages = [atom()]
%% Modules = [atom()]
%% proplist() = [term()]
%%
@@ -985,17 +950,15 @@ get_doc_env(Opts) ->
%% INHERIT-OPTIONS: get_doc_links/4
%% DEFER-OPTIONS: edoc:run/3
-get_doc_env(App, Packages, Modules, Opts) ->
+get_doc_env(App, Modules, Opts) ->
Suffix = proplists:get_value(file_suffix, Opts,
?DEFAULT_FILE_SUFFIX),
AppDefault = proplists:get_value(app_default, Opts, ?APP_DEFAULT),
Includes = proplists:append_values(includes, Opts),
- {A, P, M} = get_doc_links(App, Packages, Modules, Opts),
+ {A, M} = get_doc_links(App, Modules, Opts),
#env{file_suffix = Suffix,
- package_summary = ?PACKAGE_SUMMARY ++ Suffix,
apps = A,
- packages = P,
modules = M,
app_default = AppDefault,
includes = Includes
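%% Illustrative sketch (application and module names invented) of the
%% reduced edoc_lib arities introduced above; both functions are @private
%% helpers, so this only shows the dropped package arguments.

ex_edoc_lib_usage() ->
    Env = edoc_lib:get_doc_env(myapp, [mymod], []),    % was get_doc_env/4
    edoc_lib:write_file("<html/>", "doc", "x.html",    % was write_file/5
                        [{encoding, unicode}]),
    Env.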
diff --git a/lib/edoc/src/edoc_macros.erl b/lib/edoc/src/edoc_macros.erl
index 8efbfd00c7..bdcb3fe81f 100644
--- a/lib/edoc/src/edoc_macros.erl
+++ b/lib/edoc/src/edoc_macros.erl
@@ -40,10 +40,6 @@ std_macros(Env) ->
true -> [{module, atom_to_list(Env#env.module)}]
end
++
- if Env#env.package =:= [] -> [];
- true -> [{package, atom_to_list(Env#env.package)}]
- end
- ++
[{date, fun date_macro/3},
{docRoot, Env#env.root},
{link, fun link_macro/3},
diff --git a/lib/edoc/src/edoc_parser.yrl b/lib/edoc/src/edoc_parser.yrl
index c6f8a04775..48c01c8dce 100644
--- a/lib/edoc/src/edoc_parser.yrl
+++ b/lib/edoc/src/edoc_parser.yrl
@@ -28,7 +28,7 @@
Nonterminals
start spec func_type utype_list utype_tuple utypes utype ptypes ptype
nutype function_name where_defs defs defs2 def typedef etype
-throws qname ref aref mref lref pref var_list vars fields field
+throws qname ref aref mref lref var_list vars fields field
utype_map utype_map_fields utype_map_field
futype_list bin_base_type bin_unit_type.
@@ -207,14 +207,11 @@ typedef -> atom var_list '=' utype where_defs:
ref -> aref: '$1'.
ref -> mref: '$1'.
ref -> lref: '$1'.
-ref -> pref: '$1'.
aref -> '//' atom:
edoc_refs:app(tok_val('$2')).
aref -> '//' atom '/' mref:
edoc_refs:app(tok_val('$2'), '$4').
-aref -> '//' atom '/' pref:
- edoc_refs:app(tok_val('$2'), '$4').
mref -> qname ':' atom '/' integer:
edoc_refs:function(qname('$1'), tok_val('$3'), tok_val('$5')).
@@ -223,9 +220,6 @@ mref -> qname ':' atom '(' ')':
mref -> qname:
edoc_refs:module(qname('$1')).
-pref -> qname '.' '*':
- edoc_refs:package(qname('$1')).
-
lref -> atom '/' integer:
edoc_refs:function(tok_val('$1'), tok_val('$3')).
lref -> atom '(' ')':
@@ -399,7 +393,7 @@ parse_typedef_1(S, L) ->
%% @doc Parses a <a
%% href="overview-summary.html#References">reference</a> to a module,
-%% package, function, type, or application
+%% function, type, or application
parse_ref(S, L) ->
case edoc_scanner:string(S, L) of
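%% Illustrative sketch (module, function and type names invented) of the
%% reference forms the parser still accepts now that the "package.*" form
%% is gone:
%%
%% @doc See {@link mymod}, {@link mymod:start/0}, {@link mytype()} and
%% {@link //kernel} for module, function, type and application references.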
diff --git a/lib/edoc/src/edoc_refs.erl b/lib/edoc/src/edoc_refs.erl
index ea439490ed..b9a9391053 100644
--- a/lib/edoc/src/edoc_refs.erl
+++ b/lib/edoc/src/edoc_refs.erl
@@ -27,10 +27,9 @@
-module(edoc_refs).
--export([app/1, app/2, package/1, module/1, module/2, module/3,
+-export([app/1, app/2, module/1, module/2, module/3,
function/2, function/3, function/4, type/1, type/2, type/3,
- to_string/1, to_label/1, get_uri/2, is_top/2,
- relative_module_path/2, relative_package_path/2]).
+ to_string/1, to_label/1, get_uri/2, is_top/2]).
-import(edoc_lib, [join_uri/2, escape_uri/1]).
@@ -56,9 +55,6 @@ module(M, Ref) ->
module(App, M, Ref) ->
app(App, module(M, Ref)).
-package(P) ->
- {package, P}.
-
function(F, A) ->
{function, F, A}.
@@ -88,8 +84,6 @@ to_string({module, M}) ->
atom_to_list(M) ;
to_string({module, M, Ref}) ->
atom_to_list(M) ++ ":" ++ to_string(Ref);
-to_string({package, P}) ->
- atom_to_list(P) ++ ".*";
to_string({function, F, A}) ->
atom_to_list(F) ++ "/" ++ integer_to_list(A);
to_string({type, T}) ->
@@ -111,24 +105,19 @@ get_uri({module, M, Ref}, Env) ->
module_ref(M, Env) ++ "#" ++ to_label(Ref);
get_uri({module, M}, Env) ->
module_ref(M, Env);
-get_uri({package, P}, Env) ->
- package_ref(P, Env);
get_uri(Ref, _Env) ->
"#" ++ to_label(Ref).
abs_uri({module, M}, Env) ->
module_absref(M, Env);
abs_uri({module, M, Ref}, Env) ->
- module_absref(M, Env) ++ "#" ++ to_label(Ref);
-abs_uri({package, P}, Env) ->
- package_absref(P, Env).
+ module_absref(M, Env) ++ "#" ++ to_label(Ref).
module_ref(M, Env) ->
case (Env#env.modules)(M) of
"" ->
File = atom_to_list(M) ++ Env#env.file_suffix,
- Path = relative_module_path(M, Env#env.package),
- join_uri(Path, escape_uri(File));
+ escape_uri(File);
Base ->
join_uri(Base, module_absref(M, Env))
end.
@@ -136,19 +125,6 @@ module_ref(M, Env) ->
module_absref(M, Env) ->
escape_uri(atom_to_list(M)) ++ escape_uri(Env#env.file_suffix).
-package_ref(P, Env) ->
- case (Env#env.packages)(P) of
- "" ->
- join_uri(relative_package_path(P, Env#env.package),
- escape_uri(Env#env.package_summary));
- Base ->
- join_uri(Base, package_absref(P, Env))
- end.
-
-package_absref(P, Env) ->
- join_uri(escape_uri(atom_to_list(P)),
- escape_uri(Env#env.package_summary)).
-
app_ref(A, Env) ->
case (Env#env.apps)(A) of
"" ->
@@ -166,43 +142,3 @@ is_top({app, _App}, _Env) ->
is_top(_Ref, _Env) ->
false.
-%% Each segment of a path must be separately escaped before joining.
-
-join_segments([S]) ->
- escape_uri(S);
-join_segments([S | Ss]) ->
- join_uri(escape_uri(S), join_segments(Ss)).
-
-%% 'From' is always the "current package" here:
-
-%% The empty string is returned if the To module has only one segment,
-%% implying a local reference.
-
-relative_module_path(_To, _From) ->
- "".
-
-relative_package_path(To, From) ->
- relative_path([atom_to_list(To)], [atom_to_list(From)]).
-
-%% This takes two lists of path segments (From, To). Note that an empty
-%% string will be returned if the paths are the same. Empty leading
-%% segments are stripped from both paths.
-
-relative_path(Ts, ["" | Fs]) ->
- relative_path(Ts, Fs);
-relative_path(["" | Ts], Fs) ->
- relative_path(Ts, Fs);
-relative_path(Ts, Fs) ->
- relative_path_1(Ts, Fs).
-
-relative_path_1([T | Ts], [F | Fs]) when F == T ->
- relative_path_1(Ts, Fs);
-relative_path_1(Ts, Fs) ->
- relative_path_2(Fs, Ts).
-
-relative_path_2([_F | Fs], Ts) ->
- relative_path_2(Fs, [".." | Ts]);
-relative_path_2([], []) ->
- "";
-relative_path_2([], Ts) ->
- join_segments(Ts).
diff --git a/lib/edoc/src/edoc_run.erl b/lib/edoc/src/edoc_run.erl
index b5a1ef713d..9a569d0879 100644
--- a/lib/edoc/src/edoc_run.erl
+++ b/lib/edoc/src/edoc_run.erl
@@ -17,7 +17,7 @@
%% @copyright 2003 Richard Carlsson
%% @author Richard Carlsson <[email protected]>
%% @see edoc
-%% @end
+%% @end
%% =====================================================================
%% @doc Interface for calling EDoc from Erlang startup options.
@@ -38,7 +38,7 @@
-module(edoc_run).
--export([file/1, application/1, packages/1, files/1, toc/1]).
+-export([file/1, application/1, files/1, toc/1]).
-compile({no_auto_import,[error/1]}).
@@ -92,28 +92,6 @@ files(Args) ->
end,
run(F).
-%% @spec packages([string()]) -> none()
-%%
-%% @doc Calls {@link edoc:application/2} with the corresponding
-%% arguments. The strings in the list are parsed as Erlang constant
-%% terms. The list can be either `[Packages]' or `[Packages, Options]'.
-%% In the first case {@link edoc:application/1} is called instead.
-%%
-%% The function call never returns; instead, the emulator is
-%% automatically terminated when the call has completed, signalling
-%% success or failure to the operating system.
-
-packages(Args) ->
- F = fun () ->
- case parse_args(Args) of
- [Packages] -> edoc:packages(Packages);
- [Packages, Opts] -> edoc:packages(Packages, Opts);
- _ ->
- invalid_args("edoc_run:packages/1", Args)
- end
- end,
- run(F).
-
%% @hidden Not official yet
toc(Args) ->
F = fun () ->
@@ -131,8 +109,8 @@ toc(Args) ->
%%
%% @deprecated This is part of the old interface to EDoc and is mainly
%% kept for backwards compatibility. The preferred way of generating
-%% documentation is through one of the functions {@link application/1},
-%% {@link packages/1} and {@link files/1}.
+%% documentation is through one of the functions {@link application/1}
+%% and {@link files/1}.
%%
%% @doc Calls {@link edoc:file/2} with the corresponding arguments. The
%% strings in the list are parsed as Erlang constant terms. The list can
diff --git a/lib/edoc/src/edoc_tags.erl b/lib/edoc/src/edoc_tags.erl
index 82a1b72d84..c1c453511a 100644
--- a/lib/edoc/src/edoc_tags.erl
+++ b/lib/edoc/src/edoc_tags.erl
@@ -42,7 +42,7 @@
%% Name = atom()
%% Parser = text | xml | (Text,Line,Where) -> term()
%% Flags = [Flag]
-%% Flag = module | function | package | overview | single
+%% Flag = module | function | overview | single
%%
%% Note that the pseudo-tag '@clear' is not listed here.
%% (Cf. the function 'filter_tags'.)
@@ -57,11 +57,11 @@
%% - @category (useless; superseded by keywords or free text search)
tags() ->
- All = [module,footer,function,package,overview],
- [{author, fun parse_contact/4, [module,package,overview]},
- {copyright, text, [module,package,overview,single]},
- {deprecated, xml, [module,function,package,single]},
- {doc, xml, [module,function,package,overview,single]},
+ All = [module,footer,function,overview],
+ [{author, fun parse_contact/4, [module,overview]},
+ {copyright, text, [module,overview,single]},
+ {deprecated, xml, [module,function,single]},
+ {doc, xml, [module,function,overview,single]},
{docfile, fun parse_file/4, All},
{'end', text, All},
{equiv, fun parse_expr/4, [function,single]},
@@ -69,17 +69,17 @@ tags() ->
{hidden, text, [module,function,single]},
{param, fun parse_param/4, [function]},
{private, text, [module,function,single]},
- {reference, xml, [module,footer,package,overview]},
+ {reference, xml, [module,footer,overview]},
{returns, xml, [function,single]},
- {see, fun parse_see/4, [module,function,package,overview]},
- {since, text, [module,function,package,overview,single]},
+ {see, fun parse_see/4, [module,function,overview]},
+ {since, text, [module,function,overview,single]},
{spec, fun parse_spec/4, [function,single]},
{throws, fun parse_throws/4, [function,single]},
{title, text, [overview,single]},
{'TODO', xml, All},
{todo, xml, All},
{type, fun parse_typedef/4, [module,footer,function]},
- {version, text, [module,package,overview,single]}].
+ {version, text, [module,overview,single]}].
aliases('TODO') -> todo;
aliases(return) -> returns;
@@ -369,7 +369,7 @@ parse_header(Data, Line, Env, Where) when is_list(Where) ->
{string, _, File} ->
Dir = filename:dirname(Where),
Path = Env#env.includes ++ [Dir],
- case edoc_lib:find_file(Path, "", File) of
+ case edoc_lib:find_file(Path, File) of
"" ->
throw_error(Line, {file_not_found, File});
File1 ->
diff --git a/lib/edoc/src/otpsgml_layout.erl b/lib/edoc/src/otpsgml_layout.erl
index 2c4cd919bb..052c75b9d4 100644
--- a/lib/edoc/src/otpsgml_layout.erl
+++ b/lib/edoc/src/otpsgml_layout.erl
@@ -28,7 +28,7 @@
-module(otpsgml_layout).
--export([module/2, package/2, overview/2,type/1]).
+-export([module/2, overview/2,type/1]).
-import(edoc_report, [report/2]).
@@ -811,27 +811,6 @@ xml(Title, CSS, Body) ->
xmerl:export_simple_content(t_utype_elem(E) ++ local_defs(Ds),
?SGML_EXPORT).
-
-package(E=#xmlElement{name = package, content = Es}, Options) ->
- Opts = init_opts(E, Options),
- Name = get_text(packageName, Es),
- Title = io_lib:fwrite("Package ~s", [Name]),
- Desc = get_content(description, Es),
-% ShortDesc = get_content(briefDescription, Desc),
- FullDesc = get_content(fullDescription, Desc),
- Body = ([?NL, {h1, [Title]}, ?NL]
-% ++ ShortDesc
- ++ copyright(Es)
- ++ deprecated(Es, "package")
- ++ version(Es)
- ++ since(Es)
- ++ authors(Es)
- ++ references(Es)
- ++ sees(Es)
- ++ FullDesc),
- XML = xml(Title, stylesheet(Opts), Body),
- xmerl:export_simple([XML], ?SGML_EXPORT, []).
-
overview(E=#xmlElement{name = overview, content = Es}, Options) ->
Opts = init_opts(E, Options),
Title = get_text(title, Es),
@@ -843,6 +822,7 @@ overview(E=#xmlElement{name = overview, content = Es}, Options) ->
++ copyright(Es)
++ version(Es)
++ since(Es)
+ ++ deprecated(Es, "application")
++ authors(Es)
++ references(Es)
++ sees(Es)
diff --git a/lib/edoc/test/edoc_SUITE.erl b/lib/edoc/test/edoc_SUITE.erl
index c63660c8c0..6b23054ce3 100644
--- a/lib/edoc/test/edoc_SUITE.erl
+++ b/lib/edoc/test/edoc_SUITE.erl
@@ -22,12 +22,12 @@
init_per_group/2,end_per_group/2]).
%% Test cases
--export([app/1,appup/1,build_std/1,build_map_module/1,otp_12008/1]).
+-export([app/1,appup/1,build_std/1,build_map_module/1,otp_12008/1, build_app/1]).
suite() -> [{ct_hooks,[ts_install_cth]}].
all() ->
- [app,appup,build_std,build_map_module,otp_12008].
+ [app,appup,build_std,build_map_module,otp_12008, build_app].
groups() ->
[].
@@ -95,3 +95,20 @@ otp_12008(Config) when is_list(Config) ->
ok = edoc:files([Un2], Opts2),
{'EXIT', error} = (catch edoc:files([Un3], Opts2)),
ok.
+
+build_app(suite) -> [];
+build_app(doc) -> ["Build a local app with nested source directories"];
+build_app(Config) ->
+ DataDir = ?config(data_dir, Config),
+ PrivDir = ?config(priv_dir, Config),
+ OutDir = filename:join(PrivDir, "myapp"),
+ Src = filename:join(DataDir, "myapp"),
+
+ ok = edoc:application(myapp, Src, [{dir, OutDir}, {subpackages, false}]),
+ true = filelib:is_regular(filename:join(OutDir, "a.html")),
+ false = filelib:is_regular(filename:join(OutDir, "b.html")),
+
+ ok = edoc:application(myapp, Src, [{dir, OutDir}]),
+ true = filelib:is_regular(filename:join(OutDir, "a.html")),
+ true = filelib:is_regular(filename:join(OutDir, "b.html")),
+ ok.
diff --git a/lib/edoc/test/edoc_SUITE_data/myapp/doc/.dummy b/lib/edoc/test/edoc_SUITE_data/myapp/doc/.dummy
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/edoc/test/edoc_SUITE_data/myapp/doc/.dummy
diff --git a/lib/edoc/test/edoc_SUITE_data/myapp/src/a.erl b/lib/edoc/test/edoc_SUITE_data/myapp/src/a.erl
new file mode 100644
index 0000000000..1b5b704551
--- /dev/null
+++ b/lib/edoc/test/edoc_SUITE_data/myapp/src/a.erl
@@ -0,0 +1 @@
+-module(a).
diff --git a/lib/edoc/test/edoc_SUITE_data/myapp/src/src_1/b.erl b/lib/edoc/test/edoc_SUITE_data/myapp/src/src_1/b.erl
new file mode 100644
index 0000000000..6d6f15dfe5
--- /dev/null
+++ b/lib/edoc/test/edoc_SUITE_data/myapp/src/src_1/b.erl
@@ -0,0 +1 @@
+-module(b).
diff --git a/lib/eldap/src/eldap.erl b/lib/eldap/src/eldap.erl
index 977f7d2809..ae47c815c9 100644
--- a/lib/eldap/src/eldap.erl
+++ b/lib/eldap/src/eldap.erl
@@ -413,7 +413,7 @@ parse_args([{port, Port}|T], Cpid, Data) when is_integer(Port) ->
parse_args([{timeout, Timeout}|T], Cpid, Data) when is_integer(Timeout),Timeout>0 ->
parse_args(T, Cpid, Data#eldap{timeout = Timeout});
parse_args([{anon_auth, true}|T], Cpid, Data) ->
- parse_args(T, Cpid, Data#eldap{anon_auth = false});
+ parse_args(T, Cpid, Data#eldap{anon_auth = true});
parse_args([{anon_auth, _}|T], Cpid, Data) ->
parse_args(T, Cpid, Data);
parse_args([{ssl, true}|T], Cpid, Data) ->
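%% Illustrative sketch (host name invented): after the fix above,
%% {anon_auth, true} actually enables anonymous binds instead of being
%% silently flipped back to false.

ex_eldap_anon() ->
    {ok, Handle} = eldap:open(["ldap.example.com"], [{anon_auth, true}]),
    eldap:close(Handle).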
diff --git a/lib/erl_docgen/priv/bin/specs_gen.escript b/lib/erl_docgen/priv/bin/specs_gen.escript
index 156311565c..e8a8f14e3a 100644
--- a/lib/erl_docgen/priv/bin/specs_gen.escript
+++ b/lib/erl_docgen/priv/bin/specs_gen.escript
@@ -97,7 +97,7 @@ read_file(File, Opts) ->
edoc:read_source(File, Opts).
extract(File, Forms, Opts) ->
- Env = edoc_lib:get_doc_env([], [], [], _Opts=[]),
+ Env = edoc_lib:get_doc_env([], [], _Opts=[]),
{_Module, Doc} = edoc_extract:source(Forms, File, Env, Opts),
Doc.
diff --git a/lib/erl_docgen/priv/bin/xml_from_edoc.escript b/lib/erl_docgen/priv/bin/xml_from_edoc.escript
index 65a580dca2..007546e7ba 100755
--- a/lib/erl_docgen/priv/bin/xml_from_edoc.escript
+++ b/lib/erl_docgen/priv/bin/xml_from_edoc.escript
@@ -117,7 +117,7 @@ users_guide(File, Args) ->
Text = edoc_lib:run_layout(F, Opts),
OutFile = "chapter" ++ Args#args.suffix,
- edoc_lib:write_file(Text, ".", OutFile, '', Encoding);
+ edoc_lib:write_file(Text, ".", OutFile, Encoding);
false ->
io:format("~s: not a regular file\n", [File]),
usage()
diff --git a/lib/hipe/cerl/erl_bif_types.erl b/lib/hipe/cerl/erl_bif_types.erl
index 74e93bf098..5b1401b34a 100644
--- a/lib/hipe/cerl/erl_bif_types.erl
+++ b/lib/hipe/cerl/erl_bif_types.erl
@@ -1070,9 +1070,6 @@ type(hipe_bifs, find_na_or_make_stub, 2, Xs, Opaques) ->
type(hipe_bifs, fun_to_address, 1, Xs, Opaques) ->
strict(hipe_bifs, fun_to_address, 1, Xs,
fun (_) -> t_integer() end, Opaques);
-%% type(hipe_bifs, get_emu_address, 1, Xs, Opaques) ->
-%% strict(hipe_bifs, get_emu_address, 1, Xs,
-%% fun (_) -> t_integer() end, Opaques); % address
type(hipe_bifs, get_fe, 2, Xs, Opaques) ->
strict(hipe_bifs, get_fe, 2, Xs, fun (_) -> t_integer() end, Opaques);
type(hipe_bifs, get_rts_param, 1, Xs, Opaques) ->
@@ -1081,9 +1078,6 @@ type(hipe_bifs, get_rts_param, 1, Xs, Opaques) ->
type(hipe_bifs, invalidate_funinfo_native_addresses, 1, Xs, Opaques) ->
strict(hipe_bifs, invalidate_funinfo_native_addresses, 1, Xs,
fun (_) -> t_nil() end, Opaques);
-%% type(hipe_bifs, make_native_stub, 2, Xs, Opaques) ->
-%% strict(hipe_bifs, make_native_stub, 2, Xs,
-%% fun (_) -> t_integer() end, Opaques); % address
type(hipe_bifs, mark_referred_from, 1, Xs, Opaques) ->
strict(hipe_bifs, mark_referred_from, 1, Xs,
fun (_) -> t_nil() end, Opaques);
@@ -2462,16 +2456,12 @@ arg_types(hipe_bifs, find_na_or_make_stub, 2) ->
[t_mfa(), t_boolean()];
arg_types(hipe_bifs, fun_to_address, 1) ->
[t_mfa()];
-%% arg_types(hipe_bifs, get_emu_address, 1) ->
-%% [t_mfa()];
arg_types(hipe_bifs, get_fe, 2) ->
[t_atom(), t_tuple([t_integer(), t_integer(), t_integer()])];
arg_types(hipe_bifs, get_rts_param, 1) ->
[t_fixnum()];
arg_types(hipe_bifs, invalidate_funinfo_native_addresses, 1) ->
[t_list(t_mfa())];
-%% arg_types(hipe_bifs, make_native_stub, 2) ->
-%% [t_integer(), t_arity()];
arg_types(hipe_bifs, mark_referred_from, 1) ->
[t_mfa()];
arg_types(hipe_bifs, merge_term, 1) ->
diff --git a/lib/hipe/ppc/hipe_rtl_to_ppc.erl b/lib/hipe/ppc/hipe_rtl_to_ppc.erl
index 7dfa56df29..a55fc137c3 100644
--- a/lib/hipe/ppc/hipe_rtl_to_ppc.erl
+++ b/lib/hipe/ppc/hipe_rtl_to_ppc.erl
@@ -102,10 +102,18 @@ conv_insn(I, Map, Data) ->
end.
conv_fconv(I, Map, Data) ->
- %% Dst := (double)Src, where Dst is FP reg and Src is int reg
+ %% Dst := (double)Src, where Dst is FP reg and Src is GP reg or imm
{Dst, Map0} = conv_fpreg(hipe_rtl:fconv_dst(I), Map),
- {Src, Map1} = conv_src(hipe_rtl:fconv_src(I), Map0), % exclude imm src
- I2 = mk_fconv(Dst, Src),
+ {Src, Map1} = conv_src(hipe_rtl:fconv_src(I), Map0),
+ I2 =
+ case hipe_ppc:is_temp(Src) of
+ true ->
+ mk_fconv(Dst, Src);
+ false ->
+ Tmp = new_untagged_temp(),
+ mk_li(Tmp, Src,
+ mk_fconv(Dst, Tmp))
+ end,
{I2, Map1, Data}.
mk_fconv(Dst, Src) ->
diff --git a/lib/hipe/rtl/hipe_rtl.erl b/lib/hipe/rtl/hipe_rtl.erl
index bc61bec0bd..2f62dd79ad 100644
--- a/lib/hipe/rtl/hipe_rtl.erl
+++ b/lib/hipe/rtl/hipe_rtl.erl
@@ -413,11 +413,11 @@ rtl_info_update(Rtl, Info) -> Rtl#rtl{info=Info}.
%% move
%%
-mk_move(Dst, Src) -> #move{dst=Dst, src=Src}.
+mk_move(Dst, Src) -> false = is_fpreg(Dst), false = is_fpreg(Src), #move{dst=Dst, src=Src}.
move_dst(#move{dst=Dst}) -> Dst.
-move_dst_update(M, NewDst) -> M#move{dst=NewDst}.
+move_dst_update(M, NewDst) -> false = is_fpreg(NewDst), M#move{dst=NewDst}.
move_src(#move{src=Src}) -> Src.
-move_src_update(M, NewSrc) -> M#move{src=NewSrc}.
+move_src_update(M, NewSrc) -> false = is_fpreg(NewSrc), M#move{src=NewSrc}.
%% is_move(#move{}) -> true;
%% is_move(_) -> false.
@@ -469,7 +469,11 @@ phi_remove_pred(Phi, Pred) ->
case NewArgList of
[Arg] -> %% the phi should be turned into a move instruction
{_Label,Var} = Arg,
- mk_move(phi_dst(Phi), Var);
+ Dst = phi_dst(Phi),
+ case {is_fpreg(Dst), is_fpreg(Var)} of
+ {true, true} -> mk_fmove(Dst, Var);
+ {false, false} -> mk_move(Dst, Var)
+ end;
%% io:format("~nPhi (~w) turned into move (~w) when removing pred ~w~n",[Phi,Move,Pred]),
[_|_] ->
Phi#phi{arglist=NewArgList}
@@ -836,11 +840,11 @@ fp_unop_op(#fp_unop{op=Op}) -> Op.
%% fmove
%%
-mk_fmove(X, Y) -> #fmove{dst=X, src=Y}.
+mk_fmove(X, Y) -> true = is_fpreg(X), true = is_fpreg(Y), #fmove{dst=X, src=Y}.
fmove_dst(#fmove{dst=Dst}) -> Dst.
-fmove_dst_update(M, NewDst) -> M#fmove{dst=NewDst}.
+fmove_dst_update(M, NewDst) -> true = is_fpreg(NewDst), M#fmove{dst=NewDst}.
fmove_src(#fmove{src=Src}) -> Src.
-fmove_src_update(M, NewSrc) -> M#fmove{src=NewSrc}.
+fmove_src_update(M, NewSrc) -> true = is_fpreg(NewSrc), M#fmove{src=NewSrc}.
%%
%% fconv
diff --git a/lib/hipe/sparc/hipe_rtl_to_sparc.erl b/lib/hipe/sparc/hipe_rtl_to_sparc.erl
index dc001f865e..fd21be3ae7 100644
--- a/lib/hipe/sparc/hipe_rtl_to_sparc.erl
+++ b/lib/hipe/sparc/hipe_rtl_to_sparc.erl
@@ -85,17 +85,17 @@ conv_insn(I, Map, Data) ->
end.
conv_fconv(I, Map, Data) ->
- %% Dst := (double)Src, where Dst is FP reg and Src is int reg
- {Src, Map1} = conv_src(hipe_rtl:fconv_src(I), Map), % exclude imm src
+ %% Dst := (double)Src, where Dst is FP reg and Src is GP reg or imm
+ {Src, Map1} = conv_src(hipe_rtl:fconv_src(I), Map),
{Dst, Map2} = conv_fpreg(hipe_rtl:fconv_dst(I), Map1),
I2 = mk_fconv(Src, Dst),
{I2, Map2, Data}.
mk_fconv(Src, Dst) ->
CSP = hipe_sparc:mk_temp(14, 'untagged'), % o6
- Disp = hipe_sparc:mk_simm13(100),
- [hipe_sparc:mk_store('stw', Src, CSP, Disp),
- hipe_sparc:mk_pseudo_fload(CSP, Disp, Dst, true),
+ Offset = 100,
+ mk_store('stw', Src, CSP, Offset) ++
+ [hipe_sparc:mk_pseudo_fload(CSP, hipe_sparc:mk_simm13(Offset), Dst, true),
hipe_sparc:mk_fp_unary('fitod', Dst, Dst)].
conv_fmove(I, Map, Data) ->
diff --git a/lib/hipe/x86/hipe_rtl_to_x86.erl b/lib/hipe/x86/hipe_rtl_to_x86.erl
index d77e4fed3b..36da2f4d44 100644
--- a/lib/hipe/x86/hipe_rtl_to_x86.erl
+++ b/lib/hipe/x86/hipe_rtl_to_x86.erl
@@ -236,7 +236,7 @@ conv_insn(I, Map, Data) ->
#fconv{} ->
{Dst, Map0} = conv_dst(hipe_rtl:fconv_dst(I), Map),
{[], Src, Map1} = conv_src(hipe_rtl:fconv_src(I), Map0),
- I2 = [hipe_x86:mk_fmove(Src, Dst)],
+ I2 = conv_fconv(Dst, Src),
{I2, Map1, Data};
X ->
%% gctest??
@@ -712,6 +712,19 @@ vmap_lookup(Map, Key) ->
vmap_bind(Map, Key, Val) ->
gb_trees:insert(Key, Val, Map).
+%%% Finalise the conversion of an Integer-to-Float operation.
+
+conv_fconv(Dst, Src) ->
+ case hipe_x86:is_imm(Src) of
+ false ->
+ [hipe_x86:mk_fmove(Src, Dst)];
+ true ->
+ %% cvtsi2sd does not allow src to be an immediate
+ Tmp = new_untagged_temp(),
+ [hipe_x86:mk_move(Src, Tmp),
+ hipe_x86:mk_fmove(Tmp, Dst)]
+ end.
+
%%% Finalise the conversion of a 2-address FP operation.
conv_fp_unary(Dst, Src, FpUnOp) ->
diff --git a/lib/inets/doc/src/http_uri.xml b/lib/inets/doc/src/http_uri.xml
index e64c375bba..acbd79b201 100644
--- a/lib/inets/doc/src/http_uri.xml
+++ b/lib/inets/doc/src/http_uri.xml
@@ -63,6 +63,7 @@ host() = string()
port() = pos_integer()
path() = string() - Representing a file path or directory path
query() = string()
+fragment() = string()
]]></code>
<marker id="scheme_defaults"></marker>
@@ -92,13 +93,16 @@ query() = string()
<v>URI = uri() </v>
<v>Options = [Option] </v>
<v>Option = {ipv6_host_with_brackets, boolean()} |
- {scheme_defaults, scheme_defaults()}]</v>
- <v>Result = {Scheme, UserInfo, Host, Port, Path, Query}</v>
+ {scheme_defaults, scheme_defaults()} |
+ {fragment, boolean()}]</v>
+ <v>Result = {Scheme, UserInfo, Host, Port, Path, Query} |
+ {Scheme, UserInfo, Host, Port, Path, Query, Fragment}</v>
<v>UserInfo = user_info()</v>
<v>Host = host()</v>
<v>Port = pos_integer()</v>
<v>Path = path()</v>
<v>Query = query()</v>
+ <v>Fragment = fragment()</v>
<v>Reason = term() </v>
</type>
<desc>
@@ -111,6 +115,9 @@ query() = string()
a scheme not found in the scheme defaults) a port number must be
provided or else the parsing will fail. </p>
+      <p>If the fragment option is true, the URI fragment will be returned
+      as part of the parsing result; otherwise it is ignored.</p>
+
<marker id="encode"></marker>
</desc>
</func>
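%% Illustrative sketch (URI invented; result shape as I read the updated
%% parser further down in this patch) of the new fragment option:

ex_parse_uri() ->
    {ok, {http, "", "example.com", 80, "/a/b", "?q=1", "#top"}} =
        http_uri:parse("http://example.com/a/b?q=1#top", [{fragment, true}]),
    %% without the option the fragment is simply ignored
    {ok, {http, "", "example.com", 80, "/a/b", "?q=1"}} =
        http_uri:parse("http://example.com/a/b?q=1#top", []).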
diff --git a/lib/inets/doc/src/httpd.xml b/lib/inets/doc/src/httpd.xml
index 4ca038cc99..20c8a6b1b1 100644
--- a/lib/inets/doc/src/httpd.xml
+++ b/lib/inets/doc/src/httpd.xml
@@ -4,7 +4,7 @@
<erlref>
<header>
<copyright>
- <year>1997</year><year>2013</year>
+ <year>1997</year><year>2015</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -249,7 +249,16 @@
<p>Limits the size of the message header of HTTP request.
Defaults to 10240. </p>
</item>
-
+
+ <marker id="prop_max_content_length"></marker>
+ <tag>{max_content_length, integer()}</tag>
+ <item>
+ <p>Maximum Content-Length in an incoming request, in bytes. Requests
+ with content larger than this are answered with Status 413.
+ Defaults to 100000000 (100 MB).
+ </p>
+ </item>
+
<marker id="prop_max_uri"></marker>
<tag>{max_uri_size, integer()}</tag>
<item>
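%% Illustrative sketch (port, paths and limit invented): the new
%% max_content_length property is passed like any other httpd property
%% when starting the server.

ex_start_httpd() ->
    inets:start(),
    inets:start(httpd, [{port, 8080},
                        {server_name, "example"},
                        {server_root, "/tmp/myserver"},
                        {document_root, "/tmp/myserver/htdocs"},
                        {max_content_length, 10000000}]).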
diff --git a/lib/inets/doc/src/notes.xml b/lib/inets/doc/src/notes.xml
index fb7034498c..7f73aa5e7b 100644
--- a/lib/inets/doc/src/notes.xml
+++ b/lib/inets/doc/src/notes.xml
@@ -32,7 +32,40 @@
<file>notes.xml</file>
</header>
- <section><title>Inets 5.10.4</title>
+ <section><title>Inets 5.10.5</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ mod_alias now handles https-URIs properly</p>
+ <p>
+ Consistent view of configuration parameter
+ keep_alive_timeout, should be presented in the
+ httpd:info/[1,2] function in the same unit as it is
+ inputted.</p>
+ <p>
+ Own Id: OTP-12436 Aux Id: seq12786 </p>
+ </item>
+ </list>
+ </section>
+
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+	    Gracefully handle invalid content-length headers instead
+ of crashing in list_to_integer.</p>
+ <p>
+ Own Id: OTP-12429</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Inets 5.10.4</title>
<section><title>Fixed Bugs and Malfunctions</title>
<list>
diff --git a/lib/inets/examples/httpd_load_test/hdlt_slave.erl b/lib/inets/examples/httpd_load_test/hdlt_slave.erl
index 52af9b5b90..41361418bc 100644
--- a/lib/inets/examples/httpd_load_test/hdlt_slave.erl
+++ b/lib/inets/examples/httpd_load_test/hdlt_slave.erl
@@ -180,7 +180,7 @@ ssh_slave_start(Host, ErlCmd) ->
?DEBUG("ssh_exec_erl -> done", []),
{ok, Connection, Channel};
Error3 ->
- ?LOG("failed exec comand: ~p", [Error3]),
+ ?LOG("failed exec command: ~p", [Error3]),
throw({error, {ssh_exec_failed, Error3}})
end.
diff --git a/lib/inets/src/http_client/httpc_cookie.erl b/lib/inets/src/http_client/httpc_cookie.erl
index 134115bdfa..ed306a84f5 100644
--- a/lib/inets/src/http_client/httpc_cookie.erl
+++ b/lib/inets/src/http_client/httpc_cookie.erl
@@ -334,9 +334,23 @@ add_domain(Str, #http_cookie{domain_default = true}) ->
add_domain(Str, #http_cookie{domain = Domain}) ->
Str ++ "; $Domain=" ++ Domain.
+is_set_cookie_valid("") ->
+ %% an empty Set-Cookie header is not valid
+ false;
+is_set_cookie_valid([$=|_]) ->
+ %% a Set-Cookie header without name is not valid
+ false;
+is_set_cookie_valid(SetCookieHeader) ->
+ %% a Set-Cookie header without name/value is not valid
+ case string:chr(SetCookieHeader, $=) of
+ 0 -> false;
+ _ -> true
+ end.
+
parse_set_cookies(CookieHeaders, DefaultPathDomain) ->
- %% empty Set-Cookie header is invalid according to RFC but some sites violate it
- SetCookieHeaders = [Value || {"set-cookie", Value} <- CookieHeaders, Value /= ""],
+ %% filter invalid Set-Cookie headers
+ SetCookieHeaders = [Value || {"set-cookie", Value} <- CookieHeaders,
+ is_set_cookie_valid(Value)],
Cookies = [parse_set_cookie(SetCookieHeader, DefaultPathDomain) ||
SetCookieHeader <- SetCookieHeaders],
%% print_cookies("Parsed Cookies", Cookies),
@@ -348,6 +362,8 @@ parse_set_cookie(CookieHeader, {DefaultPath, DefaultDomain}) ->
Name = string:substr(CookieHeader, 1, Pos - 1),
{Value, Attrs} =
case string:substr(CookieHeader, Pos + 1) of
+ [] ->
+ {"", ""};
[$;|ValueAndAttrs] ->
{"", string:tokens(ValueAndAttrs, ";")};
ValueAndAttrs ->
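%% Illustrative sketch (header values invented) of the filtering added
%% above; is_set_cookie_valid/1 is a local helper, so this would live
%% inside httpc_cookie itself.

ex_set_cookie_filter() ->
    Headers = [{"set-cookie", ""},         % empty header: dropped
               {"set-cookie", "=secret"},  % no cookie name: dropped
               {"set-cookie", "justtext"}, % no '=' at all: dropped
               {"set-cookie", "sid="},     % empty value: kept
               {"set-cookie", "sid=42"}],  % kept
    ["sid=", "sid=42"] =
        [V || {"set-cookie", V} <- Headers, is_set_cookie_valid(V)].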
diff --git a/lib/inets/src/http_lib/http_internal.hrl b/lib/inets/src/http_lib/http_internal.hrl
index 53b776c4e7..54425740b5 100644
--- a/lib/inets/src/http_lib/http_internal.hrl
+++ b/lib/inets/src/http_lib/http_internal.hrl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2002-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2002-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -28,6 +28,7 @@
-define(HTTP_MAX_URI_SIZE, nolimit).
-define(HTTP_MAX_VERSION_STRING, 8).
-define(HTTP_MAX_METHOD_STRING, 20).
+-define(HTTP_MAX_CONTENT_LENGTH, 100000000).
-ifndef(HTTP_DEFAULT_SSL_KIND).
-define(HTTP_DEFAULT_SSL_KIND, essl).
diff --git a/lib/inets/src/http_lib/http_request.erl b/lib/inets/src/http_lib/http_request.erl
index f295453bdd..a0833ddf01 100644
--- a/lib/inets/src/http_lib/http_request.erl
+++ b/lib/inets/src/http_lib/http_request.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2005-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2005-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -21,8 +21,16 @@
-include("http_internal.hrl").
--export([headers/2, http_headers/1, is_absolut_uri/1]).
+-export([headers/2, http_headers/1, is_absolut_uri/1, key_value/1]).
+
+key_value(KeyValueStr) ->
+ case lists:splitwith(fun($:) -> false; (_) -> true end, KeyValueStr) of
+ {Key, [$: | Value]} ->
+ {http_util:to_lower(string:strip(Key)), string:strip(Value)};
+ {_, []} ->
+ undefined
+ end.
%%-------------------------------------------------------------------------
%% headers(HeaderList, #http_request_h{}) -> #http_request_h{}
%% HeaderList - ["HeaderField:Value"]
@@ -34,14 +42,12 @@
%%-------------------------------------------------------------------------
headers([], Headers) ->
Headers;
-headers([Header | Tail], Headers) ->
- case lists:splitwith(fun($:) -> false; (_) -> true end, Header) of
- {Key, [$: | Value]} ->
- headers(Tail, headers(http_util:to_lower(string:strip(Key)),
- string:strip(Value), Headers));
- {_, []} ->
- headers(Tail, Headers)
- end.
+headers([{Key, Value} | Tail], Headers) ->
+ headers(Tail, headers(Key, Value, Headers));
+headers([undefined], Headers) ->
+ Headers;
+headers(KeyValues, Headers) ->
+ headers([key_value(KeyValue) || KeyValue <- KeyValues], Headers).
%%-------------------------------------------------------------------------
%% headers(#http_request_h{}) -> HeaderList
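%% Illustrative sketch (header strings invented) of the newly exported
%% key_value/1 used by the header handling above:

ex_key_value() ->
    {"content-length", "42"} =
        http_request:key_value("Content-Length: 42"),
    %% a line without a colon yields 'undefined'
    undefined = http_request:key_value("malformed header line").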
diff --git a/lib/inets/src/http_lib/http_uri.erl b/lib/inets/src/http_lib/http_uri.erl
index 5962001c3a..350a4bc169 100644
--- a/lib/inets/src/http_lib/http_uri.erl
+++ b/lib/inets/src/http_lib/http_uri.erl
@@ -90,8 +90,8 @@ parse(AbsURI, Opts) ->
{error, Reason};
{Scheme, DefaultPort, Rest} ->
case (catch parse_uri_rest(Scheme, DefaultPort, Rest, Opts)) of
- {ok, {UserInfo, Host, Port, Path, Query}} ->
- {ok, {Scheme, UserInfo, Host, Port, Path, Query}};
+ {ok, Result} ->
+ {ok, Result};
{error, Reason} ->
{error, {Reason, Scheme, AbsURI}};
_ ->
@@ -148,27 +148,22 @@ parse_scheme(AbsURI, Opts) ->
end.
parse_uri_rest(Scheme, DefaultPort, "//" ++ URIPart, Opts) ->
- {Authority, PathQuery} =
- case split_uri(URIPart, "/", URIPart, 1, 0) of
- Split = {_, _} ->
- Split;
- URIPart ->
- case split_uri(URIPart, "\\?", URIPart, 1, 0) of
- Split = {_, _} ->
- Split;
- URIPart ->
- {URIPart,""}
- end
- end,
+ {Authority, PathQueryFragment} =
+ split_uri(URIPart, "[/?#]", {URIPart, ""}, 1, 0),
+ {RawPath, QueryFragment} =
+ split_uri(PathQueryFragment, "[?#]", {PathQueryFragment, ""}, 1, 0),
+ {Query, Fragment} =
+ split_uri(QueryFragment, "#", {QueryFragment, ""}, 1, 0),
{UserInfo, HostPort} = split_uri(Authority, "@", {"", Authority}, 1, 1),
{Host, Port} = parse_host_port(Scheme, DefaultPort, HostPort, Opts),
- {Path, Query} = parse_path_query(PathQuery),
- {ok, {UserInfo, Host, Port, Path, Query}}.
-
+ Path = path(RawPath),
+ case lists:keyfind(fragment, 1, Opts) of
+ {fragment, true} ->
+ {ok, {Scheme, UserInfo, Host, Port, Path, Query, Fragment}};
+ _ ->
+ {ok, {Scheme, UserInfo, Host, Port, Path, Query}}
+ end.
-parse_path_query(PathQuery) ->
- {Path, Query} = split_uri(PathQuery, "\\?", {PathQuery, ""}, 1, 0),
- {path(Path), Query}.
%% In this version of the function, we no longer need
%% the Scheme argument, but just in case...
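Note: with the rewritten parse_uri_rest/4 the default return shape is unchanged and the fragment only appears when {fragment, true} is passed, roughly as exercised by the fragments/1 test case added to uri_SUITE further down:

    {ok, {http, [], "localhost", 80, "/path", "?query"}} =
        http_uri:parse("http://localhost/path?query#fragment"),
    {ok, {http, [], "localhost", 80, "/path", "?query", "#fragment"}} =
        http_uri:parse("http://localhost/path?query#fragment", [{fragment, true}]).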
diff --git a/lib/inets/src/http_server/httpd_conf.erl b/lib/inets/src/http_server/httpd_conf.erl
index 8f68d9fcd5..78dda794db 100644
--- a/lib/inets/src/http_server/httpd_conf.erl
+++ b/lib/inets/src/http_server/httpd_conf.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2013. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -205,13 +205,13 @@ load("MaxURISize " ++ MaxHeaderSize, []) ->
" is an invalid number of MaxHeaderSize")}
end;
-load("MaxBodySize " ++ MaxBodySize, []) ->
- case make_integer(MaxBodySize) of
+load("MaxContentLength " ++ Max, []) ->
+ case make_integer(Max) of
{ok, Integer} ->
- {ok, [], {max_body_size,Integer}};
+ {ok, [], {max_content_length, Integer}};
{error, _} ->
- {error, ?NICE(clean(MaxBodySize) ++
- " is an invalid number of MaxBodySize")}
+ {error, ?NICE(clean(Max) ++
+ " is an invalid number of MaxContentLength")}
end;
load("ServerName " ++ ServerName, []) ->
@@ -337,7 +337,7 @@ load("MaxKeepAliveRequest " ++ MaxRequests, []) ->
load("KeepAliveTimeout " ++ Timeout, []) ->
case make_integer(Timeout) of
{ok, Integer} ->
- {ok, [], {keep_alive_timeout, Integer*1000}};
+ {ok, [], {keep_alive_timeout, Integer}};
{error, _} ->
{error, ?NICE(clean(Timeout)++" is an invalid KeepAliveTimeout")}
end;
@@ -569,6 +569,12 @@ validate_config_params([{max_body_size, Value} | Rest])
validate_config_params([{max_body_size, Value} | _]) ->
throw({max_body_size, Value});
+validate_config_params([{max_content_length, Value} | Rest])
+ when is_integer(Value) andalso (Value > 0) ->
+ validate_config_params(Rest);
+validate_config_params([{max_content_length, Value} | _]) ->
+ throw({max_content_length, Value});
+
validate_config_params([{server_name, Value} | Rest])
when is_list(Value) ->
validate_config_params(Rest);
@@ -635,7 +641,7 @@ validate_config_params([{max_keep_alive_request, Value} | Rest])
when is_integer(Value) andalso (Value > 0) ->
validate_config_params(Rest);
validate_config_params([{max_keep_alive_request, Value} | _]) ->
- throw({max_header_size, Value});
+ throw({max_keep_alive_request, Value});
validate_config_params([{keep_alive_timeout, Value} | Rest])
when is_integer(Value) andalso (Value >= 0) ->
@@ -799,7 +805,7 @@ store({server_tokens, ServerTokens} = Entry, _ConfigList) ->
Server = server(ServerTokens),
{ok, [Entry, {server, Server}]};
store({keep_alive_timeout, KeepAliveTimeout}, _ConfigList) ->
- {ok, {keep_alive_timeout, KeepAliveTimeout * 1000}};
+ {ok, {keep_alive_timeout, KeepAliveTimeout}};
store(ConfigListEntry, _ConfigList) ->
{ok, ConfigListEntry}.
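Note: a hypothetical httpd start sketch showing both affected options; the paths and port are placeholders, and keep_alive_timeout is now given in seconds since the millisecond conversion moved to httpd_request_handler:

    %% inets:start() is assumed to have been called already.
    {ok, _Pid} =
        inets:start(httpd, [{port, 8080},
                            {server_name, "localhost"},
                            {server_root, "/tmp"},            %% placeholder
                            {document_root, "/tmp"},          %% placeholder
                            {keep_alive, true},
                            {keep_alive_timeout, 120},        %% seconds
                            {max_content_length, 100000000}]).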
diff --git a/lib/inets/src/http_server/httpd_request.erl b/lib/inets/src/http_server/httpd_request.erl
index 712c73599f..6985065c3e 100644
--- a/lib/inets/src/http_server/httpd_request.erl
+++ b/lib/inets/src/http_server/httpd_request.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2005-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2005-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -118,18 +118,17 @@ validate(Method, Uri, Version) ->
%% create it.
%% ----------------------------------------------------------------------
update_mod_data(ModData, Method, RequestURI, HTTPVersion, Headers)->
- ParsedHeaders = tagup_header(Headers),
- PersistentConn = get_persistens(HTTPVersion, ParsedHeaders,
+ PersistentConn = get_persistens(HTTPVersion, Headers,
ModData#mod.config_db),
{ok, ModData#mod{data = [],
method = Method,
absolute_uri = format_absolute_uri(RequestURI,
- ParsedHeaders),
+ Headers),
request_uri = format_request_uri(RequestURI),
http_version = HTTPVersion,
request_line = Method ++ " " ++ RequestURI ++
" " ++ HTTPVersion,
- parsed_header = ParsedHeaders,
+ parsed_header = Headers,
connection = PersistentConn}}.
%%%========================================================================
@@ -146,14 +145,14 @@ parse_method(_, _, _, Max, _, _) ->
%% We do not know the version of the client as it comes after the
%% method send the lowest version in the response so that the client
%% will be able to handle it.
- {error, {too_long, Max, 413, "Method unreasonably long"}, lowest_version()}.
+ {error, {size_error, Max, 413, "Method unreasonably long"}, lowest_version()}.
parse_uri(_, _, Current, MaxURI, _, _)
when (Current > MaxURI) andalso (MaxURI =/= nolimit) ->
%% We do not know the version of the client as it comes after the
%% uri send the lowest version in the response so that the client
%% will be able to handle it.
- {error, {too_long, MaxURI, 414, "URI unreasonably long"},lowest_version()};
+ {error, {size_error, MaxURI, 414, "URI unreasonably long"},lowest_version()};
parse_uri(<<>>, URI, Current, Max, MaxSizes, Result) ->
{?MODULE, parse_uri, [URI, Current, Max, MaxSizes, Result]};
parse_uri(<<?SP, Rest/binary>>, URI, _, _, MaxSizes, Result) ->
@@ -179,12 +178,12 @@ parse_version(<<?CR>> = Data, Version, Current, Max, MaxSizes, Result) ->
parse_version(<<Octet, Rest/binary>>, Version, Current, Max, MaxSizes, Result) when Current =< Max ->
parse_version(Rest, [Octet | Version], Current + 1, Max, MaxSizes, Result);
parse_version(_, _, _, Max,_,_) ->
- {error, {too_long, Max, 413, "Version string unreasonably long"}, lowest_version()}.
+ {error, {size_error, Max, 413, "Version string unreasonably long"}, lowest_version()}.
parse_headers(_, _, _, Current, Max, _, Result)
when Max =/= nolimit andalso Current > Max ->
HttpVersion = lists:nth(3, lists:reverse(Result)),
- {error, {too_long, Max, 413, "Headers unreasonably long"}, HttpVersion};
+ {error, {size_error, Max, 413, "Headers unreasonably long"}, HttpVersion};
parse_headers(<<>>, Header, Headers, Current, Max, MaxSizes, Result) ->
{?MODULE, parse_headers, [<<>>, Header, Headers, Current, Max,
@@ -204,14 +203,22 @@ parse_headers(<<?CR,?LF,?CR,?LF,Body/binary>>, [], [], _, _, _, Result) ->
Result])),
{ok, NewResult};
parse_headers(<<?CR,?LF,?CR,?LF,Body/binary>>, Header, Headers, _, _,
- _, Result) ->
- HTTPHeaders = [lists:reverse(Header) | Headers],
- RequestHeaderRcord =
- http_request:headers(HTTPHeaders, #http_request_h{}),
- NewResult =
- list_to_tuple(lists:reverse([Body, {RequestHeaderRcord,
- HTTPHeaders} | Result])),
- {ok, NewResult};
+ MaxSizes, Result) ->
+ case http_request:key_value(lists:reverse(Header)) of
+ undefined -> %% Skip headers with missing :
+ {ok, list_to_tuple(lists:reverse([Body, {http_request:headers(Headers, #http_request_h{}), Headers} | Result]))};
+ NewHeader ->
+ case check_header(NewHeader, MaxSizes) of
+ ok ->
+ {ok, list_to_tuple(lists:reverse([Body, {http_request:headers([NewHeader | Headers],
+ #http_request_h{}),
+ [NewHeader | Headers]} | Result]))};
+
+ {error, Reason} ->
+ HttpVersion = lists:nth(3, lists:reverse(Result)),
+ {error, Reason, HttpVersion}
+ end
+ end;
parse_headers(<<?CR,?LF,?CR>> = Data, Header, Headers, Current, Max,
MaxSizes, Result) ->
@@ -243,8 +250,21 @@ parse_headers(<<?LF, Octet, Rest/binary>>, Header, Headers, Current, Max,
MaxSizes, Result);
parse_headers(<<?CR,?LF, Octet, Rest/binary>>, Header, Headers, _, Max,
MaxSizes, Result) ->
- parse_headers(Rest, [Octet], [lists:reverse(Header) | Headers],
- 0, Max, MaxSizes, Result);
+ case http_request:key_value(lists:reverse(Header)) of
+ undefined -> %% Skip headers with missing :
+ parse_headers(Rest, [Octet], Headers,
+ 0, Max, MaxSizes, Result);
+ NewHeader ->
+ case check_header(NewHeader, MaxSizes) of
+ ok ->
+ parse_headers(Rest, [Octet], [NewHeader | Headers],
+ 0, Max, MaxSizes, Result);
+ {error, Reason} ->
+ HttpVersion = lists:nth(3, lists:reverse(Result)),
+ {error, Reason, HttpVersion}
+ end
+ end;
+
parse_headers(<<?CR>> = Data, Header, Headers, Current, Max,
MaxSizes, Result) ->
{?MODULE, parse_headers, [Data, Header, Headers, Current, Max,
@@ -388,29 +408,25 @@ get_persistens(HTTPVersion,ParsedHeader,ConfigDB)->
false
end.
-
-%%----------------------------------------------------------------------
-%% tagup_header
-%%
-%% Parses the header of a HTTP request and returns a key,value tuple
-%% list containing Name and Value of each header directive as of:
-%%
-%% Content-Type: multipart/mixed -> {"Content-Type", "multipart/mixed"}
-%%
-%% But in http/1.1 the field-names are case insencitive so now it must be
-%% Content-Type: multipart/mixed -> {"content-type", "multipart/mixed"}
-%% The standard furthermore says that leading and traling white space
-%% is not a part of the fieldvalue and shall therefore be removed.
-%%----------------------------------------------------------------------
-tagup_header([]) -> [];
-tagup_header([Line|Rest]) -> [tag(Line, [])|tagup_header(Rest)].
-
-tag([], Tag) ->
- {http_util:to_lower(lists:reverse(Tag)), ""};
-tag([$:|Rest], Tag) ->
- {http_util:to_lower(lists:reverse(Tag)), string:strip(Rest)};
-tag([Chr|Rest], Tag) ->
- tag(Rest, [Chr|Tag]).
-
lowest_version()->
"HTTP/0.9".
+
+check_header({"content-length", Value}, Maxsizes) ->
+ Max = proplists:get_value(max_content_length, Maxsizes),
+ MaxLen = length(integer_to_list(Max)),
+ case length(Value) =< MaxLen of
+ true ->
+ try
+ _ = list_to_integer(Value),
+ ok
+ catch _:_ ->
+ {error, {size_error, Max, 411, "content-length not an integer"}}
+ end;
+ false ->
+ {error, {size_error, Max, 413, "content-length unreasonably long"}}
+ end;
+check_header(_, _) ->
+ ok.
+
+
+
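Note: check_header/2 is internal to httpd_request; the digit-count guard it implements can be restated on its own as a sketch, where MaxContentLength stands for the configured max_content_length value:

    check_content_length(Value, MaxContentLength) ->
        MaxLen = length(integer_to_list(MaxContentLength)),
        case length(Value) =< MaxLen of
            true ->
                try list_to_integer(Value) of
                    _ -> ok                      %% plausible content-length
                catch
                    _:_ -> {error, 411}          %% not an integer
                end;
            false ->
                {error, 413}                     %% unreasonably long
        end.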
diff --git a/lib/inets/src/http_server/httpd_request_handler.erl b/lib/inets/src/http_server/httpd_request_handler.erl
index 9bea58cc9e..f7a9fe5d49 100644
--- a/lib/inets/src/http_server/httpd_request_handler.erl
+++ b/lib/inets/src/http_server/httpd_request_handler.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -96,8 +96,9 @@ init([Manager, ConfigDB, AcceptTimeout]) ->
proc_lib:init_ack({ok, self()}),
{SocketType, Socket} = await_socket_ownership_transfer(AcceptTimeout),
-
- KeepAliveTimeOut = httpd_util:lookup(ConfigDB, keep_alive_timeout, 150000),
+
+ %% Timeout value is in seconds; we want it in milliseconds
+ KeepAliveTimeOut = 1000 * httpd_util:lookup(ConfigDB, keep_alive_timeout, 150),
case http_transport:negotiate(SocketType, Socket, ?HANDSHAKE_TIMEOUT) of
{error, _Error} ->
@@ -119,11 +120,15 @@ continue_init(Manager, ConfigDB, SocketType, Socket, TimeOut) ->
MaxHeaderSize = max_header_size(ConfigDB),
MaxURISize = max_uri_size(ConfigDB),
NrOfRequest = max_keep_alive_request(ConfigDB),
-
+ MaxContentLen = max_content_length(ConfigDB),
+
{_, Status} = httpd_manager:new_connection(Manager),
MFA = {httpd_request, parse, [[{max_uri, MaxURISize}, {max_header, MaxHeaderSize},
- {max_version, ?HTTP_MAX_VERSION_STRING}, {max_method, ?HTTP_MAX_METHOD_STRING}]]},
+ {max_version, ?HTTP_MAX_VERSION_STRING},
+ {max_method, ?HTTP_MAX_METHOD_STRING},
+ {max_content_length, MaxContentLen}
+ ]]},
State = #state{mod = Mod,
manager = Manager,
@@ -207,7 +212,7 @@ handle_info({Proto, Socket, Data},
set_new_data_size(cancel_request_timeout(State), NewDataSize)
end,
handle_http_msg(Result, NewState);
- {error, {too_long, MaxSize, ErrCode, ErrStr}, Version} ->
+ {error, {size_error, MaxSize, ErrCode, ErrStr}, Version} ->
NewModData = ModData#mod{http_version = Version},
httpd_response:send_status(NewModData, ErrCode, ErrStr),
Reason = io_lib:format("~p: ~p max size is ~p~n",
@@ -444,8 +449,7 @@ handle_body(#state{headers = Headers, body = Body, mod = ModData} = State,
error_log(Reason, ModData),
{stop, normal, State#state{response_sent = true}};
_ ->
- Length =
- list_to_integer(Headers#http_request_h.'content-length'),
+ Length = list_to_integer(Headers#http_request_h.'content-length'),
case ((Length =< MaxBodySize) or (MaxBodySize == nolimit)) of
true ->
case httpd_request:whole_body(Body, Length) of
@@ -454,7 +458,7 @@ handle_body(#state{headers = Headers, body = Body, mod = ModData} = State,
ModData#mod.socket,
[{active, once}]),
{noreply, State#state{mfa =
- {Module, Function, Args}}};
+ {Module, Function, Args}}};
{ok, NewBody} ->
handle_response(
@@ -471,7 +475,7 @@ handle_body(#state{headers = Headers, body = Body, mod = ModData} = State,
handle_expect(#state{headers = Headers, mod =
#mod{config_db = ConfigDB} = ModData} = State,
MaxBodySize) ->
- Length = Headers#http_request_h.'content-length',
+ Length = list_to_integer(Headers#http_request_h.'content-length'),
case expect(Headers, ModData#mod.http_version, ConfigDB) of
continue when (MaxBodySize > Length) orelse (MaxBodySize =:= nolimit) ->
httpd_response:send_status(ModData, 100, ""),
@@ -545,9 +549,13 @@ handle_next_request(#state{mod = #mod{connection = true} = ModData,
init_data = ModData#mod.init_data},
MaxHeaderSize = max_header_size(ModData#mod.config_db),
MaxURISize = max_uri_size(ModData#mod.config_db),
+ MaxContentLen = max_content_length(ModData#mod.config_db),
MFA = {httpd_request, parse, [[{max_uri, MaxURISize}, {max_header, MaxHeaderSize},
- {max_version, ?HTTP_MAX_VERSION_STRING}, {max_method, ?HTTP_MAX_METHOD_STRING}]]},
+ {max_version, ?HTTP_MAX_VERSION_STRING},
+ {max_method, ?HTTP_MAX_METHOD_STRING},
+ {max_content_length, MaxContentLen}
+ ]]},
TmpState = State#state{mod = NewModData,
mfa = MFA,
max_keep_alive_request = decrease(Max),
@@ -630,3 +638,5 @@ max_body_size(ConfigDB) ->
max_keep_alive_request(ConfigDB) ->
httpd_util:lookup(ConfigDB, max_keep_alive_request, infinity).
+max_content_length(ConfigDB) ->
+ httpd_util:lookup(ConfigDB, max_content_length, ?HTTP_MAX_CONTENT_LENGTH).
diff --git a/lib/inets/src/http_server/mod_alias.erl b/lib/inets/src/http_server/mod_alias.erl
index 0b9fe4cfe0..5039cd56b5 100644
--- a/lib/inets/src/http_server/mod_alias.erl
+++ b/lib/inets/src/http_server/mod_alias.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -55,6 +55,7 @@ do(#mod{data = Data} = Info) ->
do_alias(#mod{config_db = ConfigDB,
request_uri = ReqURI,
+ socket_type = SocketType,
data = Data}) ->
{ShortPath, Path, AfterPath} =
real_name(ConfigDB, ReqURI, which_alias(ConfigDB)),
@@ -70,8 +71,9 @@ do_alias(#mod{config_db = ConfigDB,
(LastChar =/= $/)) ->
?hdrt("directory and last-char is a /", []),
ServerName = which_server_name(ConfigDB),
- Port = port_string( which_port(ConfigDB) ),
- URL = "http://" ++ ServerName ++ Port ++ ReqURI ++ "/",
+ Port = port_string(which_port(ConfigDB)),
+ Protocol = get_protocol(SocketType),
+ URL = Protocol ++ ServerName ++ Port ++ ReqURI ++ "/",
ReasonPhrase = httpd_util:reason_phrase(301),
Message = httpd_util:message(301, URL, ConfigDB),
{proceed,
@@ -94,6 +96,12 @@ port_string(80) ->
port_string(Port) ->
":" ++ integer_to_list(Port).
+get_protocol(ip_comm) ->
+ "http://";
+get_protocol(_) ->
+ %% Should be cleaned up to have only one ssl type; essl vs ssl is no longer relevant
+ "https://".
+
%% real_name
real_name(ConfigDB, RequestURI, []) ->
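Note: the change is only visible in the Location value of the 301 reply; a small sketch with a made-up host and port:

    SocketType = essl,                       %% or ip_comm for plain TCP
    Protocol = case SocketType of
                   ip_comm -> "http://";
                   _ -> "https://"           %% any ssl flavour
               end,
    "https://myhost:8443/docs/" =
        Protocol ++ "myhost" ++ ":8443" ++ "/docs" ++ "/".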
diff --git a/lib/inets/test/http_format_SUITE.erl b/lib/inets/test/http_format_SUITE.erl
index d4a3f28f38..5952e9fd6e 100644
--- a/lib/inets/test/http_format_SUITE.erl
+++ b/lib/inets/test/http_format_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2004-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2004-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -355,10 +355,12 @@ http_request(Config) when is_list(Config) ->
"http://www.erlang.org",
"HTTP/1.1",
{#http_request_h{host = "www.erlang.org", te = []},
- ["te: ","host:www.erlang.org"]}, <<>>} =
+ [{"te", []}, {"host", "www.erlang.org"}]}, <<>>} =
parse(httpd_request, parse, [[{max_header, ?HTTP_MAX_HEADER_SIZE},
{max_version, ?HTTP_MAX_VERSION_STRING},
- {max_method, ?HTTP_MAX_METHOD_STRING}]],
+ {max_method, ?HTTP_MAX_METHOD_STRING},
+ {max_content_length, ?HTTP_MAX_CONTENT_LENGTH}
+ ]],
HttpHead),
HttpHead1 = ["GET http://www.erlang.org HTTP/1.1" ++
@@ -369,7 +371,9 @@ http_request(Config) when is_list(Config) ->
{#http_request_h{}, []}, <<>>} =
parse(httpd_request, parse, [[{max_header, ?HTTP_MAX_HEADER_SIZE},
{max_version, ?HTTP_MAX_VERSION_STRING},
- {max_method, ?HTTP_MAX_METHOD_STRING}]], HttpHead1),
+ {max_method, ?HTTP_MAX_METHOD_STRING},
+ {max_content_length, ?HTTP_MAX_CONTENT_LENGTH}
+ ]], HttpHead1),
HttpHead2 = ["GET http://www.erlang.org HTTP/1.1" ++
@@ -380,7 +384,9 @@ http_request(Config) when is_list(Config) ->
{#http_request_h{}, []}, <<>>} =
parse(httpd_request, parse, [[{max_header, ?HTTP_MAX_HEADER_SIZE},
{max_version, ?HTTP_MAX_VERSION_STRING},
- {max_method, ?HTTP_MAX_METHOD_STRING}]], HttpHead2),
+ {max_method, ?HTTP_MAX_METHOD_STRING},
+ {max_content_length, ?HTTP_MAX_CONTENT_LENGTH}
+ ]], HttpHead2),
%% Note the following body is not related to the headers above
HttpBody = ["<HTML>\n<HEAD>\n<TITLE> dummy </TITLE>\n</HEAD>\n<BODY>\n",
diff --git a/lib/inets/test/httpc_SUITE.erl b/lib/inets/test/httpc_SUITE.erl
index 63f8bc5bc6..0e89e831fb 100644
--- a/lib/inets/test/httpc_SUITE.erl
+++ b/lib/inets/test/httpc_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2004-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2004-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -92,6 +92,7 @@ only_simulated() ->
cookie,
cookie_profile,
empty_set_cookie,
+ invalid_set_cookie,
trace,
stream_once,
stream_single_chunk,
@@ -570,6 +571,18 @@ empty_set_cookie(Config) when is_list(Config) ->
ok = httpc:set_options([{cookies, disabled}]).
%%-------------------------------------------------------------------------
+invalid_set_cookie(doc) ->
+ ["Test ignoring invalid Set-Cookie header"];
+invalid_set_cookie(Config) when is_list(Config) ->
+ ok = httpc:set_options([{cookies, enabled}]),
+
+ URL = url(group_name(Config), "/invalid_set_cookie.html", Config),
+ {ok, {{_,200,_}, [_|_], [_|_]}} =
+ httpc:request(get, {URL, []}, [], []),
+
+ ok = httpc:set_options([{cookies, disabled}]).
+
+%%-------------------------------------------------------------------------
headers_as_is(doc) ->
["Test the option headers_as_is"];
headers_as_is(Config) when is_list(Config) ->
@@ -1275,8 +1288,9 @@ dummy_server_init(Caller, ip_comm, Inet, _) ->
dummy_ipcomm_server_loop({httpd_request, parse, [[{max_uri, ?HTTP_MAX_URI_SIZE},
{max_header, ?HTTP_MAX_HEADER_SIZE},
{max_version,?HTTP_MAX_VERSION_STRING},
- {max_method, ?HTTP_MAX_METHOD_STRING}]]},
- [], ListenSocket);
+ {max_method, ?HTTP_MAX_METHOD_STRING},
+ {max_content_length, ?HTTP_MAX_CONTENT_LENGTH}]]},
+ [], ListenSocket);
dummy_server_init(Caller, ssl, Inet, SSLOptions) ->
BaseOpts = [binary, {reuseaddr,true}, {active, false} |
@@ -1290,7 +1304,9 @@ dummy_ssl_server_init(Caller, BaseOpts, Inet) ->
dummy_ssl_server_loop({httpd_request, parse, [[{max_uri, ?HTTP_MAX_URI_SIZE},
{max_method, ?HTTP_MAX_METHOD_STRING},
{max_version,?HTTP_MAX_VERSION_STRING},
- {max_method, ?HTTP_MAX_METHOD_STRING}]]},
+ {max_method, ?HTTP_MAX_METHOD_STRING},
+ {max_content_length, ?HTTP_MAX_CONTENT_LENGTH}
+ ]]},
[], ListenSocket).
dummy_ipcomm_server_loop(MFA, Handlers, ListenSocket) ->
@@ -1367,16 +1383,20 @@ handle_request(Module, Function, Args, Socket) ->
stop ->
stop;
<<>> ->
- {httpd_request, parse, [[<<>>, [{max_uri, ?HTTP_MAX_URI_SIZE},
+ {httpd_request, parse, [[{max_uri,?HTTP_MAX_URI_SIZE},
{max_header, ?HTTP_MAX_HEADER_SIZE},
{max_version,?HTTP_MAX_VERSION_STRING},
- {max_method, ?HTTP_MAX_METHOD_STRING}]]]};
+ {max_method, ?HTTP_MAX_METHOD_STRING},
+ {max_content_length, ?HTTP_MAX_CONTENT_LENGTH}
+ ]]};
Data ->
handle_request(httpd_request, parse,
[Data, [{max_uri, ?HTTP_MAX_URI_SIZE},
- {max_header, ?HTTP_MAX_HEADER_SIZE},
- {max_version,?HTTP_MAX_VERSION_STRING},
- {max_method, ?HTTP_MAX_METHOD_STRING}]], Socket)
+ {max_header, ?HTTP_MAX_HEADER_SIZE},
+ {max_version,?HTTP_MAX_VERSION_STRING},
+ {max_method, ?HTTP_MAX_METHOD_STRING},
+ {max_content_length, ?HTTP_MAX_CONTENT_LENGTH}
+ ]], Socket)
end;
NewMFA ->
NewMFA
@@ -1466,7 +1486,7 @@ dummy_ssl_server_hang_loop(_) ->
ensure_host_header_with_port([]) ->
false;
-ensure_host_header_with_port(["host: " ++ Host| _]) ->
+ensure_host_header_with_port([{"host", Host}| _]) ->
case string:tokens(Host, [$:]) of
[_ActualHost, _Port] ->
true;
@@ -1478,7 +1498,7 @@ ensure_host_header_with_port([_|T]) ->
auth_header([]) ->
auth_header_not_found;
-auth_header(["authorization:" ++ Value | _]) ->
+auth_header([{"authorization", Value} | _]) ->
{ok, string:strip(Value)};
auth_header([_ | Tail]) ->
auth_header(Tail).
@@ -1495,7 +1515,7 @@ handle_auth("Basic " ++ UserInfo, Challange, DefaultResponse) ->
check_cookie([]) ->
ct:fail(no_cookie_header);
-check_cookie(["cookie:" ++ _Value | _]) ->
+check_cookie([{"cookie", _} | _]) ->
ok;
check_cookie([_Head | Tail]) ->
check_cookie(Tail).
@@ -1715,6 +1735,14 @@ handle_uri(_,"/empty_set_cookie.html",_,_,_,_) ->
"Content-Length:32\r\n\r\n"++
"<HTML><BODY>foobar</BODY></HTML>";
+handle_uri(_,"/invalid_set_cookie.html",_,_,_,_) ->
+ "HTTP/1.1 200 ok\r\n" ++
+ "set-cookie: =\r\n" ++
+ "set-cookie: name=\r\n" ++
+ "set-cookie: name-or-value\r\n" ++
+ "Content-Length:32\r\n\r\n"++
+ "<HTML><BODY>foobar</BODY></HTML>";
+
handle_uri(_,"/missing_crlf.html",_,_,_,_) ->
"HTTP/1.1 200 ok" ++
"Content-Length:32\r\n" ++
diff --git a/lib/inets/test/httpd_SUITE.erl b/lib/inets/test/httpd_SUITE.erl
index 4010597657..342004f19b 100644
--- a/lib/inets/test/httpd_SUITE.erl
+++ b/lib/inets/test/httpd_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2013-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2013-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -132,6 +132,7 @@ http_get() ->
bad_hex,
missing_CR,
max_header,
+ max_content_length,
ipv6
].
@@ -979,13 +980,22 @@ max_header(Config) when is_list(Config) ->
Host = ?config(host, Config),
case Version of
"HTTP/0.9" ->
- {skip, no_implemented};
+ {skip, not_implemented};
_ ->
dos_hostname(?config(type, Config), ?config(port, Config), Host,
?config(node, Config), Version, ?MAX_HEADER_SIZE)
end.
%%-------------------------------------------------------------------------
+max_content_length() ->
+ ["Denial Of Service (DOS) attack, prevented by max_content_length"].
+max_content_length(Config) when is_list(Config) ->
+ Version = ?config(http_version, Config),
+ Host = ?config(host, Config),
+ garbage_content_length(?config(type, Config), ?config(port, Config), Host,
+ ?config(node, Config), Version).
+
+%%-------------------------------------------------------------------------
security_1_1(Config) when is_list(Config) ->
security([{http_version, "HTTP/1.1"} | Config]).
@@ -1368,7 +1378,9 @@ server_config(http_reload, Config) ->
server_config(https_reload, Config) ->
[{keep_alive_timeout, 2}] ++ server_config(https, Config);
server_config(http_limit, Config) ->
- [{max_clients, 1}] ++ server_config(http, Config);
+ [{max_clients, 1},
+ %% Make sure option checking code is run
+ {max_content_length, 100000002}] ++ server_config(http, Config);
server_config(https_limit, Config) ->
[{max_clients, 1}] ++ server_config(https, Config);
server_config(http_basic_auth, Config) ->
@@ -1814,7 +1826,7 @@ dos_hostname(Type, Port, Host, Node, Version, Max) ->
ok = httpd_test_lib:verify_request(Type, Host, Port, Node,
dos_hostname_request(TooLongHeader, Version),
- [{statuscode, dos_code(Version)},
+ [{statuscode, request_entity_too_large_code(Version)},
{version, Version}]).
dos_hostname_request(Host, Version) ->
dos_http_request("GET / ", Version, Host).
@@ -1824,11 +1836,32 @@ dos_http_request(Request, "HTTP/1.1" = Version, Host) ->
dos_http_request(Request, Version, Host) ->
Request ++ Version ++ "\r\nhost:" ++ Host ++ "\r\n\r\n".
-dos_code("HTTP/1.0") ->
+request_entity_too_large_code("HTTP/1.0") ->
403; %% 413 not defined in HTTP/1.0
-dos_code(_) ->
+request_entity_too_large_code(_) ->
413.
+length_required_code("HTTP/1.0") ->
+ 403; %% 411 not defined in HTTP/1.0
+length_required_code(_) ->
+ 411.
+
+garbage_content_length(Type, Port, Host, Node, Version) ->
+ ok = httpd_test_lib:verify_request(Type, Host, Port, Node,
+ garbage_content_length_request("GET / ", Version, Host, "aaaa"),
+ [{statuscode, length_required_code(Version)},
+ {version, Version}]),
+ ok = httpd_test_lib:verify_request(Type, Host, Port, Node,
+ garbage_content_length_request("GET / ", Version, Host,
+ lists:duplicate(100, $a)),
+ [{statuscode, request_entity_too_large_code(Version)},
+ {version, Version}]).
+
+garbage_content_length_request(Request, Version, Host, Garbage) ->
+ http_request(Request, Version, Host,
+ {"content-length:" ++ Garbage, "Body with garbage content length indicator"}).
+
+
update_password(Node, ServerRoot, _Address, Port, AuthPrefix, Dir, Old, New)->
Directory = filename:join([ServerRoot, "htdocs", AuthPrefix ++ Dir]),
rpc:call(Node, mod_auth, update_password,
diff --git a/lib/inets/test/uri_SUITE.erl b/lib/inets/test/uri_SUITE.erl
index 9ba09e1474..f75e347d0c 100644
--- a/lib/inets/test/uri_SUITE.erl
+++ b/lib/inets/test/uri_SUITE.erl
@@ -46,6 +46,7 @@ all() ->
userinfo,
scheme,
queries,
+ fragments,
escaped,
hexed_query
].
@@ -105,6 +106,42 @@ queries(Config) when is_list(Config) ->
{ok, {http,[],"localhost",8888,"/foobar.html","?foo=bar&foobar=42"}} =
http_uri:parse("http://localhost:8888/foobar.html?foo=bar&foobar=42").
+fragments(Config) when is_list(Config) ->
+ {ok, {http,[],"localhost",80,"/",""}} =
+ http_uri:parse("http://localhost#fragment"),
+ {ok, {http,[],"localhost",80,"/path",""}} =
+ http_uri:parse("http://localhost/path#fragment"),
+ {ok, {http,[],"localhost",80,"/","?query"}} =
+ http_uri:parse("http://localhost?query#fragment"),
+ {ok, {http,[],"localhost",80,"/path","?query"}} =
+ http_uri:parse("http://localhost/path?query#fragment"),
+ {ok, {http,[],"localhost",80,"/","","#fragment"}} =
+ http_uri:parse("http://localhost#fragment", [{fragment,true}]),
+ {ok, {http,[],"localhost",80,"/path","","#fragment"}} =
+ http_uri:parse("http://localhost/path#fragment", [{fragment,true}]),
+ {ok, {http,[],"localhost",80,"/","?query","#fragment"}} =
+ http_uri:parse("http://localhost?query#fragment", [{fragment,true}]),
+ {ok, {http,[],"localhost",80,"/path","?query","#fragment"}} =
+ http_uri:parse("http://localhost/path?query#fragment",
+ [{fragment,true}]),
+ {ok, {http,[],"localhost",80,"/","",""}} =
+ http_uri:parse("http://localhost", [{fragment,true}]),
+ {ok, {http,[],"localhost",80,"/path","",""}} =
+ http_uri:parse("http://localhost/path", [{fragment,true}]),
+ {ok, {http,[],"localhost",80,"/","?query",""}} =
+ http_uri:parse("http://localhost?query", [{fragment,true}]),
+ {ok, {http,[],"localhost",80,"/path","?query",""}} =
+ http_uri:parse("http://localhost/path?query", [{fragment,true}]),
+ {ok, {http,[],"localhost",80,"/","","#"}} =
+ http_uri:parse("http://localhost#", [{fragment,true}]),
+ {ok, {http,[],"localhost",80,"/path","","#"}} =
+ http_uri:parse("http://localhost/path#", [{fragment,true}]),
+ {ok, {http,[],"localhost",80,"/","?query","#"}} =
+ http_uri:parse("http://localhost?query#", [{fragment,true}]),
+ {ok, {http,[],"localhost",80,"/path","?query","#"}} =
+ http_uri:parse("http://localhost/path?query#", [{fragment,true}]),
+ ok.
+
escaped(Config) when is_list(Config) ->
{ok, {http,[],"www.somedomain.com",80,"/%2Eabc",[]}} =
http_uri:parse("http://www.somedomain.com/%2Eabc"),
diff --git a/lib/inets/vsn.mk b/lib/inets/vsn.mk
index dbae5e4b3c..7d11916454 100644
--- a/lib/inets/vsn.mk
+++ b/lib/inets/vsn.mk
@@ -18,6 +18,6 @@
# %CopyrightEnd%
APPLICATION = inets
-INETS_VSN = 5.10.4
+INETS_VSN = 5.10.5
PRE_VSN =
APP_VSN = "$(APPLICATION)-$(INETS_VSN)$(PRE_VSN)"
diff --git a/lib/kernel/doc/src/gen_sctp.xml b/lib/kernel/doc/src/gen_sctp.xml
index dc9e4766a9..ee8cd441d4 100644
--- a/lib/kernel/doc/src/gen_sctp.xml
+++ b/lib/kernel/doc/src/gen_sctp.xml
@@ -961,7 +961,7 @@
<pre> #sctp_paddrinfo{
assoc_id = assoc_id(),
address = {IP, Port},
- state = inactive | active,
+ state = inactive | active | unconfirmed,
cwnd = integer(),
srtt = integer(),
rto = integer(),
diff --git a/lib/kernel/doc/src/inet.xml b/lib/kernel/doc/src/inet.xml
index 8dd311e5cd..77a8caaaf6 100644
--- a/lib/kernel/doc/src/inet.xml
+++ b/lib/kernel/doc/src/inet.xml
@@ -332,23 +332,23 @@ fe80::204:acff:fe17:bf38
<taglist>
<tag><c>recv_avg</c></tag>
<item>
- <p>Average size of packets in bytes received to the socket.</p>
+ <p>Average size of packets in bytes received by the socket.</p>
</item>
<tag><c>recv_cnt</c></tag>
<item>
- <p>Number of packets received to the socket.</p>
+ <p>Number of packets received by the socket.</p>
</item>
<tag><c>recv_dvi</c></tag>
<item>
- <p>Average packet size deviation in bytes received to the socket.</p>
+ <p>Average packet size deviation in bytes received by the socket.</p>
</item>
<tag><c>recv_max</c></tag>
<item>
- <p>The size of the largest packet in bytes received to the socket.</p>
+ <p>The size of the largest packet in bytes received by the socket.</p>
</item>
<tag><c>recv_oct</c></tag>
<item>
- <p>Number of bytes received to the socket.</p>
+ <p>Number of bytes received by the socket.</p>
</item>
<tag><c>send_avg</c></tag>
diff --git a/lib/kernel/src/gen_udp.erl b/lib/kernel/src/gen_udp.erl
index 70dceb3679..860eec10a0 100644
--- a/lib/kernel/src/gen_udp.erl
+++ b/lib/kernel/src/gen_udp.erl
@@ -78,7 +78,7 @@
ipv6_v6only.
-type socket() :: port().
--export_type([option/0, option_name/0]).
+-export_type([option/0, option_name/0, socket/0]).
-spec open(Port) -> {ok, Socket} | {error, Reason} when
Port :: inet:port_number(),
diff --git a/lib/kernel/src/hipe_unified_loader.erl b/lib/kernel/src/hipe_unified_loader.erl
index e5928c7b63..2d124d95b7 100644
--- a/lib/kernel/src/hipe_unified_loader.erl
+++ b/lib/kernel/src/hipe_unified_loader.erl
@@ -827,7 +827,6 @@ patch_to_emu_step1(Mod) ->
%% were added as the result of dynamic apply calls. We must
%% purge them too, but we have no explicit record of them.
%% Therefore invalidate all native addresses for the module.
- %% emu_make_stubs/1 will repair the ones for compiled static calls.
hipe_bifs:invalidate_funinfo_native_addresses(MFAs),
%% Find all call sites that call these MFAs. As a side-effect,
%% create native stubs for any MFAs that are referred.
@@ -841,7 +840,6 @@ patch_to_emu_step1(Mod) ->
%% Step 2 must occur after the new BEAM stub module is created.
patch_to_emu_step2(ReferencesToPatch) ->
- emu_make_stubs(ReferencesToPatch),
redirect(ReferencesToPatch).
-spec is_loaded(Module::atom()) -> boolean().
@@ -852,21 +850,6 @@ is_loaded(M) when is_atom(M) ->
catch _:_ -> false
end.
--ifdef(notdef).
-emu_make_stubs([{MFA,_Refs}|Rest]) ->
- make_stub(MFA),
- emu_make_stubs(Rest);
-emu_make_stubs([]) ->
- [].
-
-make_stub({_,_,A} = MFA) ->
- EmuAddress = hipe_bifs:get_emu_address(MFA),
- StubAddress = hipe_bifs:make_native_stub(EmuAddress, A),
- hipe_bifs:set_funinfo_native_address(MFA, StubAddress).
--else.
-emu_make_stubs(_) -> [].
--endif.
-
%%--------------------------------------------------------------------
%% Given a list of MFAs, tag them with their referred_from references.
%% The resulting {MFA,Refs} list is later passed to redirect/1, once
diff --git a/lib/kernel/src/inet.erl b/lib/kernel/src/inet.erl
index 43bab8bcf0..ec2c350931 100644
--- a/lib/kernel/src/inet.erl
+++ b/lib/kernel/src/inet.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -1070,7 +1070,7 @@ gethostbyname_tm(Name, Type, Timer, [wins|_]=Opts) ->
gethostbyname_tm_native(Name, Type, Timer, Opts);
gethostbyname_tm(Name, Type, Timer, [native|_]=Opts) ->
gethostbyname_tm_native(Name, Type, Timer, Opts);
-gethostbyname_tm(Name, Type, Timer, [_|_]=Opts) ->
+gethostbyname_tm(Name, Type, Timer, [_|Opts]) ->
gethostbyname_tm(Name, Type, Timer, Opts);
%% Make sure we always can look up our own hostname.
gethostbyname_tm(Name, Type, Timer, []) ->
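Note: the old clause matched [_|_]=Opts and recursed on the same list, so a single unsupported lookup method made gethostbyname loop forever; it is now dropped and resolution continues. The lookup_bad_search_option/1 case added to inet_SUITE below reproduces it roughly like this ('bogus_method' is a made-up invalid method):

    ets:insert(inet_db, {res_lookup, [bogus_method]}),
    {ok, Hostname} = inet:gethostname(),
    %% Returns with the fix; hung before it.
    {ok, _Hostent} = inet:gethostbyname(Hostname).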
diff --git a/lib/kernel/src/standard_error.erl b/lib/kernel/src/standard_error.erl
index 10cf77e0d4..1c43063937 100644
--- a/lib/kernel/src/standard_error.erl
+++ b/lib/kernel/src/standard_error.erl
@@ -63,7 +63,7 @@ server(PortName,PortSettings) ->
run(Port).
run(P) ->
- put(unicode,false),
+ put(encoding, latin1),
server_loop(P).
server_loop(Port) ->
@@ -95,25 +95,47 @@ do_io_request(Req, From, ReplyAs, Port) ->
io_reply(From, ReplyAs, Reply).
%% New in R13B
-% Wide characters (Unicode)
-io_request({put_chars,Encoding,Chars}, Port) -> % Binary new in R9C
- put_chars(wrap_characters_to_binary(Chars,Encoding,
- case get(unicode) of
- true -> unicode;
- _ -> latin1
- end), Port);
-io_request({put_chars,Encoding,Mod,Func,Args}, Port) ->
- Result = case catch apply(Mod,Func,Args) of
- Data when is_list(Data); is_binary(Data) ->
- wrap_characters_to_binary(Data,Encoding,
- case get(unicode) of
- true -> unicode;
- _ -> latin1
- end);
- Undef ->
- Undef
- end,
- put_chars(Result, Port);
+%% Encoding option (unicode/latin1)
+io_request({put_chars,unicode,Chars}, Port) ->
+ case wrap_characters_to_binary(Chars, unicode, get(encoding)) of
+ error ->
+ {error,{error,put_chars}};
+ Bin ->
+ put_chars(Bin, Port)
+ end;
+io_request({put_chars,unicode,Mod,Func,Args}, Port) ->
+ case catch apply(Mod, Func, Args) of
+ Data when is_list(Data); is_binary(Data) ->
+ case wrap_characters_to_binary(Data, unicode, get(encoding)) of
+ Bin when is_binary(Bin) ->
+ put_chars(Bin, Port);
+ error ->
+ {error,{error,put_chars}}
+ end;
+ _ ->
+ {error,{error,put_chars}}
+ end;
+io_request({put_chars,latin1,Chars}, Port) ->
+ case catch unicode:characters_to_binary(Chars, latin1, get(encoding)) of
+ Data when is_binary(Data) ->
+ put_chars(Data, Port);
+ _ ->
+ {error,{error,put_chars}}
+ end;
+io_request({put_chars,latin1,Mod,Func,Args}, Port) ->
+ case catch apply(Mod, Func, Args) of
+ Data when is_list(Data); is_binary(Data) ->
+ case
+ catch unicode:characters_to_binary(Data, latin1, get(encoding))
+ of
+ Bin when is_binary(Bin) ->
+ put_chars(Bin, Port);
+ _ ->
+ {error,{error,put_chars}}
+ end;
+ _ ->
+ {error,{error,put_chars}}
+ end;
%% BC if called from pre-R13 node
io_request({put_chars,Chars}, Port) ->
io_request({put_chars,latin1,Chars}, Port);
@@ -134,10 +156,10 @@ io_request({get_geometry,rows},Port) ->
_ ->
{error,{error,enotsup}}
end;
-io_request({getopts,[]}, Port) ->
- getopts(Port);
-io_request({setopts,Opts}, Port) when is_list(Opts) ->
- setopts(Opts, Port);
+io_request(getopts, _Port) ->
+ getopts();
+io_request({setopts,Opts}, _Port) when is_list(Opts) ->
+ setopts(Opts);
io_request({requests,Reqs}, Port) ->
io_requests(Reqs, {ok,ok}, Port);
io_request(R, _Port) -> %Unknown request
@@ -176,47 +198,48 @@ io_reply(From, ReplyAs, Reply) ->
%% put_chars
put_chars(Chars, Port) when is_binary(Chars) ->
_ = put_port(Chars, Port),
- {ok,ok};
-put_chars(Chars, Port) ->
- case catch list_to_binary(Chars) of
- Binary when is_binary(Binary) ->
- put_chars(Binary, Port);
- _ ->
- {error,{error,put_chars}}
- end.
+ {ok,ok}.
%% setopts
-setopts(Opts0,Port) ->
- Opts = proplists:unfold(
- proplists:substitute_negations(
- [{latin1,unicode}],
- Opts0)),
+setopts(Opts0) ->
+ Opts = expand_encoding(Opts0),
case check_valid_opts(Opts) of
- true ->
- do_setopts(Opts,Port);
- false ->
- {error,{error,enotsup}}
+ true ->
+ do_setopts(Opts);
+ false ->
+ {error,{error,enotsup}}
end.
+
check_valid_opts([]) ->
true;
-check_valid_opts([{unicode,Valid}|T]) when Valid =:= true; Valid =:= utf8; Valid =:= false ->
+check_valid_opts([{encoding,Valid}|T]) when Valid =:= unicode;
+ Valid =:= utf8; Valid =:= latin1 ->
check_valid_opts(T);
check_valid_opts(_) ->
false.
-do_setopts(Opts, _Port) ->
- case proplists:get_value(unicode,Opts) of
- Valid when Valid =:= true; Valid =:= utf8 ->
- put(unicode,true);
- false ->
- put(unicode,false);
- undefined ->
- ok
+expand_encoding([]) ->
+ [];
+expand_encoding([latin1 | T]) ->
+ [{encoding,latin1} | expand_encoding(T)];
+expand_encoding([unicode | T]) ->
+ [{encoding,unicode} | expand_encoding(T)];
+expand_encoding([H|T]) ->
+ [H|expand_encoding(T)].
+
+do_setopts(Opts) ->
+ case proplists:get_value(encoding, Opts) of
+ Valid when Valid =:= unicode; Valid =:= utf8 ->
+ put(encoding, unicode);
+ latin1 ->
+ put(encoding, latin1);
+ undefined ->
+ ok
end,
{ok,ok}.
-getopts(_Port) ->
- Uni = {unicode, get(unicode) =:= true},
+getopts() ->
+ Uni = {encoding,get(encoding)},
{ok,[Uni]}.
wrap_characters_to_binary(Chars,From,To) ->
@@ -227,17 +250,17 @@ wrap_characters_to_binary(Chars,From,To) ->
_Else ->
16#10ffff
end,
- unicode:characters_to_binary(
- [ case X of
- $\n ->
- if
- TrNl ->
- "\r\n";
- true ->
- $\n
- end;
- High when High > Limit ->
- ["\\x{",erlang:integer_to_list(X, 16),$}];
- Ordinary ->
- Ordinary
- end || X <- unicode:characters_to_list(Chars,From) ],unicode,To).
+ case catch unicode:characters_to_list(Chars, From) of
+ L when is_list(L) ->
+ unicode:characters_to_binary(
+ [ case X of
+ $\n when TrNl ->
+ "\r\n";
+ High when High > Limit ->
+ ["\\x{",erlang:integer_to_list(X, 16),$}];
+ Low ->
+ Low
+ end || X <- L ], unicode, To);
+ _ ->
+ error
+ end.
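Note: standard_error now understands the same {encoding, unicode | latin1} option as the other I/O servers instead of the old boolean unicode flag; from the shell this looks roughly like:

    [{encoding, latin1}] = io:getopts(standard_error),
    ok = io:setopts(standard_error, [{encoding, unicode}]),
    %% Code points above Latin-1 can now be written directly.
    ok = io:put_chars(standard_error, [16#20AC, $\n]),
    [{encoding, unicode}] = io:getopts(standard_error).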
diff --git a/lib/kernel/test/Makefile b/lib/kernel/test/Makefile
index f1b8a105ed..ef351a25fb 100644
--- a/lib/kernel/test/Makefile
+++ b/lib/kernel/test/Makefile
@@ -77,7 +77,8 @@ MODULES= \
ignore_cores \
zlib_SUITE \
loose_node \
- sendfile_SUITE
+ sendfile_SUITE \
+ standard_error_SUITE
APP_FILES = \
appinc.app \
diff --git a/lib/kernel/test/inet_SUITE.erl b/lib/kernel/test/inet_SUITE.erl
index d45dfc2173..849013ac79 100644
--- a/lib/kernel/test/inet_SUITE.erl
+++ b/lib/kernel/test/inet_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -36,6 +36,7 @@
gethostnative_parallell/1, cname_loop/1,
gethostnative_soft_restart/0, gethostnative_soft_restart/1,
gethostnative_debug_level/0, gethostnative_debug_level/1,
+ lookup_bad_search_option/1,
getif/1,
getif_ifr_name_overflow/1,getservbyname_overflow/1, getifaddrs/1,
parse_strict_address/1, simple_netns/1, simple_netns_open/1]).
@@ -52,6 +53,7 @@ all() ->
ipv4_to_ipv6, host_and_addr, {group, parse},
t_gethostnative, gethostnative_parallell, cname_loop,
gethostnative_debug_level, gethostnative_soft_restart,
+ lookup_bad_search_option,
getif, getif_ifr_name_overflow, getservbyname_overflow,
getifaddrs, parse_strict_address, simple_netns, simple_netns_open].
@@ -908,6 +910,21 @@ lookup_loop([H|Hs], Delay, Tag, Parent, Cnt, Hosts) ->
+lookup_bad_search_option(suite) ->
+ [];
+lookup_bad_search_option(doc) ->
+ ["Test lookup with erroneously configured lookup option (OTP-12133)"];
+lookup_bad_search_option(Config) when is_list(Config) ->
+ Db = inet_db,
+ %% The bad option cannot enter through inet_db:set_lookup/1,
+ %% but it can through e.g. .inetrc.
+ ets:insert(Db, {res_lookup,[lookup_bad_search_option]}),
+ {ok,Hostname} = inet:gethostname(),
+ {ok,_Hent} = inet:gethostbyname(Hostname), % Would hang in a loop with this bug
+ ok.
+
+
+
getif(suite) ->
[];
getif(doc) ->
diff --git a/lib/kernel/test/standard_error_SUITE.erl b/lib/kernel/test/standard_error_SUITE.erl
new file mode 100644
index 0000000000..b290454b40
--- /dev/null
+++ b/lib/kernel/test/standard_error_SUITE.erl
@@ -0,0 +1,38 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2014. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+-module(standard_error_SUITE).
+
+-export([all/0,suite/0]).
+-export([badarg/1,getopts/1]).
+
+suite() ->
+ [{ct_hooks,[ts_install_cth]}].
+
+all() ->
+ [badarg,getopts].
+
+badarg(Config) when is_list(Config) ->
+ {'EXIT',{badarg,_}} = (catch io:put_chars(standard_error, [oops])),
+ true = erlang:is_process_alive(whereis(standard_error)),
+ ok.
+
+getopts(Config) when is_list(Config) ->
+ [{encoding,latin1}] = io:getopts(standard_error),
+ ok.
diff --git a/lib/mnesia/doc/src/Mnesia_chap5.xmlsrc b/lib/mnesia/doc/src/Mnesia_chap5.xmlsrc
index 65b950bd46..127c23e0f7 100644
--- a/lib/mnesia/doc/src/Mnesia_chap5.xmlsrc
+++ b/lib/mnesia/doc/src/Mnesia_chap5.xmlsrc
@@ -867,6 +867,7 @@ ok
</section>
<section>
+ <marker id="event_handling"></marker>
<title>Mnesia Event Handling</title>
<p>System events and table events are the two categories of events
that Mnesia will generate in various situations.
diff --git a/lib/mnesia/doc/src/mnesia.xml b/lib/mnesia/doc/src/mnesia.xml
index 268dc18e65..856a7594a7 100644
--- a/lib/mnesia/doc/src/mnesia.xml
+++ b/lib/mnesia/doc/src/mnesia.xml
@@ -151,9 +151,9 @@ If a new item is inserted with the same key as
</item>
<item>
<p><c>local_content</c> When an application requires
- tables whose contents is local to each node,
+ tables whose contents are local to each node,
<c>local_content</c> tables may be used. The name of the
- table is known to all Mnesia nodes, but its contents is
+ table is known to all Mnesia nodes, but its contents are
unique on each node. This means that access to such a table
must be done locally. Set the <c>local_content</c> field to
<c>true</c> if you want to enable the <c>local_content</c>
@@ -579,7 +579,7 @@ mnesia:add_table_index(person, age)
<desc>
<p>The tables are backed up to external media using the backup
module <c>BackupMod</c>. Tables with the local contents
- property is being backed up as they exist on the current
+ property are backed up as they exist on the current
node. <c>BackupMod</c> is the default backup callback
module obtained by
<c>mnesia:system_info(backup_module)</c>. See the User's
@@ -863,7 +863,7 @@ mnesia:create_table(person,
{attributes, record_info(fields,person)}]).
</code>
<p>The specification of <c>index</c> and <c>attributes</c> may be
- hard coded as <c>{index, [2]}</c> and
+ hard coded as <c>{index, [4]}</c> and
<c>{attributes, [name, age, address, salary, children]}</c>
respectively.
</p>
@@ -2188,12 +2188,13 @@ mnesia:create_table(employee,
</desc>
</func>
<func>
- <name>subscribe(EventCategory)</name>
+ <name>subscribe(EventCategory) -> {ok, Node} | {error, Reason} </name>
<fsummary>Subscribe to events of type <c>EventCategory</c>.</fsummary>
<desc>
<p>Ensures that a copy of all events of type
<c>EventCategory</c> are sent to the caller. The event
- types available are described in the Mnesia User's Guide.</p>
+ types available are described in the Mnesia User's Guide at <seealso marker="Mnesia_chap5#event_handling">Mnesia Event Handling</seealso>.</p>
+ <p><c>Node</c> is the local node. For table events to be subscribed, mnesia must have a readable local copy of the table on the node.</p>
</desc>
</func>
<func>
@@ -2861,11 +2862,12 @@ raise(Name, Amount) ->
</desc>
</func>
<func>
- <name>unsubscribe(EventCategory)</name>
+ <name>unsubscribe(EventCategory) -> {ok, Node} | {error, Reason} </name>
<fsummary>Subscribe to events of type <c>EventCategory</c>.</fsummary>
<desc>
<p>Stops sending events of type
<c>EventCategory</c> to the caller.</p>
+ <p><c>Node</c> is the local node.</p>
</desc>
</func>
<func>
@@ -3017,6 +3019,12 @@ raise(Name, Amount) ->
totally unpredictable.</p>
</item>
<item>
+ <p><c>-mnesia dump_disc_copies_at_startup true | false</c>.
+ If set to false, this disables the dumping of <c>disc_copies</c>
+ tables during startup while tables are being loaded. The default
+ is true.</p>
+ </item>
+ <item>
<p><c>-mnesia dump_log_load_regulation true | false</c>.
Controls if the log dumps should be performed as fast as
possible or if the dumper should do its own load
diff --git a/lib/mnesia/src/mnesia_controller.erl b/lib/mnesia/src/mnesia_controller.erl
index 5a9bae54da..aa72de7594 100644
--- a/lib/mnesia/src/mnesia_controller.erl
+++ b/lib/mnesia/src/mnesia_controller.erl
@@ -51,6 +51,7 @@
force_load_table/1,
async_dump_log/1,
sync_dump_log/1,
+ snapshot_dcd/1,
connect_nodes/1,
connect_nodes/2,
wait_for_schema_commit_lock/0,
@@ -139,7 +140,8 @@ max_loaders() ->
-record(block_controller, {owner}).
-record(dump_log, {initiated_by,
- opt_reply_to
+ opt_reply_to,
+ operation = dump_log
}).
-record(net_load, {table,
@@ -201,6 +203,15 @@ async_dump_log(InitBy) ->
?SERVER_NAME ! {async_dump_log, InitBy},
ok.
+snapshot_dcd(Tables) when is_list(Tables) ->
+ case [T || T <- Tables,
+ mnesia_lib:storage_type_at_node(node(), T) =/= disc_copies] of
+ [] ->
+ call({snapshot_dcd, Tables});
+ BadTabs ->
+ {error, {not_disc_copies, BadTabs}}
+ end.
+
%% Wait for tables to be active
%% If needed, we will wait for Mnesia to start
%% If Mnesia stops, we will wait for Mnesia to restart
@@ -646,6 +657,15 @@ handle_call({sync_dump_log, InitBy}, From, State) ->
State2 = add_worker(Worker, State),
noreply(State2);
+handle_call({snapshot_dcd, Tables}, From, State) ->
+ Worker = #dump_log{initiated_by = user,
+ opt_reply_to = From,
+ operation = fun() ->
+ mnesia_dumper:snapshot_dcd(Tables)
+ end},
+ State2 = add_worker(Worker, State),
+ noreply(State2);
+
handle_call(wait_for_schema_commit_lock, From, State) ->
Worker = #schema_commit_lock{owner = From},
State2 = add_worker(Worker, State),
@@ -2089,7 +2109,12 @@ start_remote_sender(Node, Tab, Receiver, Storage) ->
dump_and_reply(ReplyTo, Worker) ->
%% No trap_exit, die intentionally instead
- Res = mnesia_dumper:opt_dump_log(Worker#dump_log.initiated_by),
+ Res = case Worker#dump_log.operation of
+ dump_log ->
+ mnesia_dumper:opt_dump_log(Worker#dump_log.initiated_by);
+ F when is_function(F, 0) ->
+ F()
+ end,
ReplyTo ! #dumper_done{worker_pid = self(),
worker_res = Res},
unlink(ReplyTo),
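Note: a usage sketch for the new snapshot entry point; 'orders' is a made-up disc_copies table, 'some_ram_table' an assumed ram_copies table, and the success value is whatever the dumper worker returns, so it is printed rather than matched:

    Res = mnesia_controller:snapshot_dcd([orders]),
    io:format("snapshot_dcd -> ~p~n", [Res]),
    %% Tables that are not disc_copies on this node are rejected up front.
    {error, {not_disc_copies, _}} =
        mnesia_controller:snapshot_dcd([some_ram_table]).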
diff --git a/lib/mnesia/src/mnesia_dumper.erl b/lib/mnesia/src/mnesia_dumper.erl
index 14665797a0..509b765dee 100644
--- a/lib/mnesia/src/mnesia_dumper.erl
+++ b/lib/mnesia/src/mnesia_dumper.erl
@@ -34,11 +34,13 @@
-export([
get_log_writes/0,
incr_log_writes/0,
+ needs_dump_ets/1,
raw_dump_table/2,
raw_named_dump_table/2,
start_regulator/0,
opt_dump_log/1,
- update/3
+ update/3,
+ snapshot_dcd/1
]).
%% Internal stuff
@@ -99,6 +101,19 @@ opt_dump_log(InitBy) ->
end,
perform_dump(InitBy, Reg).
+snapshot_dcd(Tables) ->
+ lists:foreach(
+ fun(Tab) ->
+ case mnesia_lib:storage_type_at_node(node(), Tab) of
+ disc_copies ->
+ mnesia_log:ets2dcd(Tab);
+ _ ->
+ %% Storage type was checked before queueing the op, though
+ skip
+ end
+ end, Tables),
+ dumped.
+
%% Scan for decisions
perform_dump(InitBy, Regulator) when InitBy == scan_decisions ->
?eval_debug_fun({?MODULE, perform_dump}, [InitBy]),
@@ -981,28 +996,10 @@ open_files(_Tab, _Storage, _UpdateInPlace, _InitBy) ->
false.
open_disc_copies(Tab, InitBy) ->
- DclF = mnesia_lib:tab2dcl(Tab),
- DumpEts =
- case file:read_file_info(DclF) of
- {error, enoent} ->
- false;
- {ok, DclInfo} ->
- DcdF = mnesia_lib:tab2dcd(Tab),
- case file:read_file_info(DcdF) of
- {error, Reason} ->
- mnesia_lib:dbg_out("File ~p info_error ~p ~n",
- [DcdF, Reason]),
- true;
- {ok, DcdInfo} ->
- Mul = case ?catch_val(dc_dump_limit) of
- {'EXIT', _} -> ?DumpToEtsMultiplier;
- Val -> Val
- end,
- DcdInfo#file_info.size =< (DclInfo#file_info.size * Mul)
- end
- end,
+ DumpEts = needs_dump_ets(Tab),
if
DumpEts == false; InitBy == startup ->
+ DclF = mnesia_lib:tab2dcl(Tab),
mnesia_log:open_log({?MODULE,Tab},
mnesia_log:dcl_log_header(),
DclF,
@@ -1017,6 +1014,27 @@ open_disc_copies(Tab, InitBy) ->
false
end.
+needs_dump_ets(Tab) ->
+ DclF = mnesia_lib:tab2dcl(Tab),
+ case file:read_file_info(DclF) of
+ {error, enoent} ->
+ false;
+ {ok, DclInfo} ->
+ DcdF = mnesia_lib:tab2dcd(Tab),
+ case file:read_file_info(DcdF) of
+ {error, Reason} ->
+ mnesia_lib:dbg_out("File ~p info_error ~p ~n",
+ [DcdF, Reason]),
+ true;
+ {ok, DcdInfo} ->
+ Mul = case ?catch_val(dc_dump_limit) of
+ {'EXIT', _} -> ?DumpToEtsMultiplier;
+ Val -> Val
+ end,
+ DcdInfo#file_info.size =< (DclInfo#file_info.size * Mul)
+ end
+ end.
+
%% Always opens the dcl file for writing overriding already_dumped
%% mechanismen, used for schema transactions.
open_dcl(Tab) ->
diff --git a/lib/mnesia/src/mnesia_loader.erl b/lib/mnesia/src/mnesia_loader.erl
index 530317bcdd..cbb3d7e430 100644
--- a/lib/mnesia/src/mnesia_loader.erl
+++ b/lib/mnesia/src/mnesia_loader.erl
@@ -69,9 +69,10 @@ do_get_disc_copy2(Tab, Reason, Storage, Type) when Storage == disc_copies ->
ignore;
_ ->
mnesia_monitor:mktab(Tab, Args),
- Count = mnesia_log:dcd2ets(Tab, Repair),
- case ets:info(Tab, size) of
- X when X < Count * 4 ->
+ _Count = mnesia_log:dcd2ets(Tab, Repair),
+ case mnesia_monitor:get_env(dump_disc_copies_at_startup)
+ andalso mnesia_dumper:needs_dump_ets(Tab) of
+ true ->
ok = mnesia_log:ets2dcd(Tab);
_ ->
ignore
diff --git a/lib/mnesia/src/mnesia_monitor.erl b/lib/mnesia/src/mnesia_monitor.erl
index 6fc1a394a6..a0e0e630ec 100644
--- a/lib/mnesia/src/mnesia_monitor.erl
+++ b/lib/mnesia/src/mnesia_monitor.erl
@@ -664,6 +664,7 @@ env() ->
backup_module,
debug,
dir,
+ dump_disc_copies_at_startup,
dump_log_load_regulation,
dump_log_time_threshold,
dump_log_update_in_place,
@@ -692,6 +693,8 @@ default_env(debug) ->
default_env(dir) ->
Name = lists:concat(["Mnesia.", node()]),
filename:absname(Name);
+default_env(dump_disc_copies_at_startup) ->
+ true;
default_env(dump_log_load_regulation) ->
false;
default_env(dump_log_time_threshold) ->
@@ -741,6 +744,7 @@ do_check_type(debug, trace) -> trace;
do_check_type(debug, true) -> debug;
do_check_type(debug, verbose) -> verbose;
do_check_type(dir, V) -> filename:absname(V);
+do_check_type(dump_disc_copies_at_startup, B) -> bool(B);
do_check_type(dump_log_load_regulation, B) -> bool(B);
do_check_type(dump_log_time_threshold, I) when is_integer(I), I > 0 -> I;
do_check_type(dump_log_update_in_place, B) -> bool(B);
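Note: the new flag is an ordinary mnesia application parameter; a sketch of both ways to set it (it must be set before mnesia is started):

    %% On the command line:  erl -mnesia dump_disc_copies_at_startup false
    %% or programmatically:
    ok = application:set_env(mnesia, dump_disc_copies_at_startup, false),
    ok = mnesia:start().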
diff --git a/lib/mnesia/src/mnesia_recover.erl b/lib/mnesia/src/mnesia_recover.erl
index b6492707e2..eeb4fa0ced 100644
--- a/lib/mnesia/src/mnesia_recover.erl
+++ b/lib/mnesia/src/mnesia_recover.erl
@@ -689,12 +689,29 @@ handle_call({connect_nodes, Ns}, From, State) ->
%% called from handle_info
gen_server:reply(From, {[], AlreadyConnected}),
{noreply, State};
- GoodNodes ->
+ ProbablyGoodNodes ->
%% Now we have agreed upon a protocol with some new nodes
- %% and we may use them when we recover transactions
+ %% and we may use them when we recover transactions.
+ %%
+ %% Just in case Mnesia was stopped on some of those nodes
+ %% between the protocol negotiation and now, we check one
+ %% more time the state of Mnesia.
+ %%
+ %% Of course, there is still a chance that mnesia_down
+ %% events occur during this check and we miss them. To
+ %% prevent it, handle_cast({mnesia_down, ...}, ...) removes
+ %% the down node again, in addition to mnesia_down/1.
+ %%
+ %% See a comment in handle_cast({mnesia_down, ...}, ...).
+ Verify = fun(N) ->
+ Run = mnesia_lib:is_running(N),
+ Run =:= yes orelse Run =:= starting
+ end,
+ GoodNodes = [N || N <- ProbablyGoodNodes, Verify(N)],
+
mnesia_lib:add_list(recover_nodes, GoodNodes),
cast({announce_all, GoodNodes}),
- case get_master_nodes(schema) of
+ case get_master_nodes(schema) of
[] ->
Context = starting_partitioned_network,
mnesia_monitor:detect_inconcistency(GoodNodes, Context);
@@ -842,6 +859,14 @@ handle_cast({what_decision, Node, OtherD}, State) ->
{noreply, State};
handle_cast({mnesia_down, Node}, State) ->
+ %% The node was already removed from recover_nodes in mnesia_down/1,
+ %% but we do it again here in the mnesia_recover process, in case
+ %% another event incorrectly added it back. This can happen during
+ %% Mnesia startup, which takes time between the connection, the
+ %% protocol negotiation and the merge of the schema.
+ %%
+ %% See a comment in handle_call({connect_nodes, ...}, ...).
+ mnesia_lib:del(recover_nodes, Node),
case State#state.unclear_decision of
undefined ->
{noreply, State};
diff --git a/lib/observer/doc/src/observer_ug.xml b/lib/observer/doc/src/observer_ug.xml
index 62f99c5210..fcb42f6c31 100644
--- a/lib/observer/doc/src/observer_ug.xml
+++ b/lib/observer/doc/src/observer_ug.xml
@@ -104,6 +104,29 @@
<note>
<p><em>Reds</em> can be presented as accumulated values or as values since last update.</p>
</note>
+ <p><c>Process info</c> opens a detailed information window on the selected process.
+ <taglist>
+ <tag>Process Information</tag>
+ <item>Shows the process information.</item>
+ <tag>Messages</tag>
+ <item>Shows the process messages.</item>
+ <tag>Dictionary</tag>
+ <item>Shows the process dictionary.</item>
+ <tag>Stack Trace</tag>
+ <item>Shows the process current stack trace.</item>
+ <tag>State</tag>
+ <item>Shows the process state.</item>
+ <tag>Log</tag>
+ <item>If enabled and available, shows the process SASL log entries.</item>
+ </taglist>
+ <note>
+ <p><c>Log</c> needs the SASL application to be started on the observed node, with log_mf_h as the log handler.
+ The observed node must be R16B02 or higher.
+ The <c>rb</c> server must not be started on the observed node when clicking on the menu 'Log/Toggle log view'.
+ The <c>rb</c> server will be stopped on the observed node when exiting or when changing the observed node.
+ </p>
+ </note>
+ </p>
<p><c>Trace Processes</c> will add the selected process identifiers to the <c>Trace Overview</c> view and the
node the processes reside on will be added as well.
<c>Trace Named Processes</c> will add the registered name of processes. This can be useful
diff --git a/lib/observer/src/observer_html_lib.erl b/lib/observer/src/observer_html_lib.erl
index c279218707..53197078cf 100644
--- a/lib/observer/src/observer_html_lib.erl
+++ b/lib/observer/src/observer_html_lib.erl
@@ -60,7 +60,8 @@ expandable_term_body(Heading,[],_Tab) ->
"StackDump" -> "No stack dump was found";
"Dictionary" -> "No dictionary was found";
"ProcState" -> "Information could not be retrieved,"
- " system messages may not be handled by this process."
+ " system messages may not be handled by this process.";
+ "SaslLog" -> "No log entry was found"
end];
expandable_term_body(Heading,Expanded,Tab) ->
Attr = "BORDER=0 CELLPADDING=0 CELLSPACING=1 WIDTH=100%",
@@ -102,7 +103,10 @@ expandable_term_body(Heading,Expanded,Tab) ->
element(1, lists:mapfoldl(fun(Entry, Even) ->
{proc_state(Tab, Entry,Even),
not Even}
- end, true, Expanded))]);
+ end, true, Expanded))]);
+ "SaslLog" ->
+ table(Attr,
+ [tr("BGCOLOR=white",[td("ALIGN=left", pre(href_proc_port(Expanded)))])]) ;
_ ->
table(Attr,
[tr(
diff --git a/lib/observer/src/observer_procinfo.erl b/lib/observer/src/observer_procinfo.erl
index 8e8a37fc93..a8512894f9 100644
--- a/lib/observer/src/observer_procinfo.erl
+++ b/lib/observer/src/observer_procinfo.erl
@@ -43,6 +43,8 @@
-record(worker, {panel, callback}).
+-record(io, {rdata=""}).
+
start(Process, ParentFrame, Parent) ->
wx_object:start_link(?MODULE, [Process, ParentFrame, Parent], []).
@@ -69,6 +71,10 @@ init([Pid, ParentFrame, Parent]) ->
DictPage = init_panel(Notebook, "Dictionary", [Pid,Table], fun init_dict_page/3),
StackPage = init_panel(Notebook, "Stack Trace", [Pid], fun init_stack_page/2),
StatePage = init_panel(Notebook, "State", [Pid,Table], fun init_state_page/3),
+ Ps = case gen_server:call(observer, log_status) of
+ true -> [init_panel(Notebook, "Log", [Pid,Table], fun init_log_page/3)];
+ false -> []
+ end,
wxFrame:connect(Frame, close_window),
wxMenu:connect(Frame, command_menu_selected),
@@ -78,7 +84,7 @@ init([Pid, ParentFrame, Parent]) ->
pid=Pid,
frame=Frame,
notebook=Notebook,
- pages=[ProcessPage,MessagePage,DictPage,StackPage,StatePage],
+ pages=[ProcessPage,MessagePage,DictPage,StackPage,StatePage|Ps],
expand_table=Table
}}
catch error:{badrpc, _} ->
@@ -327,6 +333,26 @@ fetch_state_info2(Pid, M) ->
{badrpc,{'EXIT',{timeout, _}}} -> []
end.
+init_log_page(Parent, Pid, Table) ->
+ Win = observer_lib:html_window(Parent),
+ Update = fun() ->
+ Fd = spawn_link(fun() -> io_server() end),
+ rpc:call(node(Pid), rb, rescan, [[{start_log, Fd}]]),
+ rpc:call(node(Pid), rb , grep, [local_pid_str(Pid)]),
+ Logs = io_get_data(Fd),
+ %% Replace the remote node's local pid notation with the global notation
+ Pref = global_pid_node_pref(Pid),
+ ExpPid = re:replace(Logs,"<0\.","<" ++ Pref ++ ".",[global, {return, list}]),
+ %% Try to keep the same layout by removing blanks to the right of the rewritten PID
+ NbBlanks = length(Pref) - 1,
+ Re = "(<" ++ Pref ++ "\.[^>]{1,}>)[ ]{"++ integer_to_list(NbBlanks) ++ "}",
+ Look = re:replace(ExpPid, Re, "\\1", [global, {return, list}]),
+ Html = observer_html_lib:expandable_term("SaslLog", Look, Table),
+ wxHtmlWindow:setPage(Win, Html)
+ end,
+ Update(),
+ {Win, Update}.
+
create_menus(MenuBar) ->
Menus = [{"File", [#create_menu{id=?wxID_CLOSE, text="Close"}]},
{"View", [#create_menu{id=?REFRESH, text="Refresh\tCtrl-R"}]}],
@@ -409,3 +435,55 @@ filter_monitor_info() ->
Ms = proplists:get_value(monitors, Data),
[Pid || {process, Pid} <- Ms]
end.
+
+local_pid_str(Pid) ->
+ %% Observer can observe remote nodes, but there is no function to get
+ %% the local pid from the remote pid, so grep would fail to find the
+ %% remote pid in the remote node's local log:
+ %% <4589.42.1> will not be found, but <0.42.1> will.
+ %% Replace the first integer with zero.
+ "<0" ++ re:replace(pid_to_list(Pid),"\<([0-9]{1,})","",[{return, list}]).
+
+global_pid_node_pref(Pid) ->
+ %% Global PID node prefix : X of <X.Y.Z>
+ string:strip(string:sub_word(pid_to_list(Pid),1,$.),left,$<).
+
+
+io_get_data(Pid) ->
+ Pid ! {self(), get_data_and_close},
+ receive
+ {Pid, data, Data} -> lists:flatten(Data)
+ end.
+
+io_server() ->
+ io_server(#io{}).
+
+io_server(State) ->
+ receive
+ {io_request, From, ReplyAs, Request} ->
+ case io_request(Request,State) of
+ {Tag, Reply, NewState} when Tag =:= ok; Tag =:= error ->
+ From ! {io_reply, ReplyAs, Reply},
+ io_server(NewState);
+ {stop, Reply, _NewState} ->
+ From ! {io_reply, ReplyAs, Reply},
+ exit(Reply)
+ end;
+ {Pid, get_data_and_close} ->
+ Pid ! {self(), data, lists:reverse(State#io.rdata)},
+ normal;
+ _Unknown ->
+ io:format("~p: Unknown msg: ~p ~n",[?LINE, _Unknown]),
+ io_server(State)
+ end.
+
+io_request({put_chars, _Encoding, Chars}, State = #io{rdata=Data}) ->
+ {ok, ok, State#io{rdata=[Chars|Data]}};
+io_request({put_chars, Encoding, Module, Function, Args}, State) ->
+ try io_request({put_chars, Encoding, apply(Module, Function, Args)}, State)
+ catch _:_ -> {error, {error,Function}, State}
+ end;
+io_request(Req, State) ->
+ io:format("~p: Unknown req: ~p ~n",[?LINE, Req]),
+ {ok, {error, request}, State}. %% reply {error, request} so io_server/1 does not crash on unknown requests
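%% Editorial sketch (not part of the patch; io_server/0 and io_get_data/1 are
%% module-internal, so this assumes it runs inside observer_procinfo): the
%% minimal I/O server above buffers whatever rb writes and returns it on demand.
Fd = spawn_link(fun() -> io_server() end),
ok = io:format(Fd, "~s~n", ["progress report"]),
"progress report\n" = io_get_data(Fd).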
diff --git a/lib/observer/src/observer_wx.erl b/lib/observer/src/observer_wx.erl
index c86f5ea916..54c4092a78 100644
--- a/lib/observer/src/observer_wx.erl
+++ b/lib/observer/src/observer_wx.erl
@@ -37,6 +37,7 @@
-define(ID_CONNECT, 2).
-define(ID_NOTEBOOK, 3).
-define(ID_CDV, 4).
+-define(ID_LOGVIEW, 5).
-define(FIRST_NODES_MENU_ID, 1000).
-define(LAST_NODES_MENU_ID, 2000).
@@ -60,7 +61,8 @@
active_tab,
node,
nodes,
- prev_node=""
+ prev_node="",
+ log = false
}).
start() ->
@@ -215,14 +217,17 @@ handle_event(#wx{event=#wxNotebook{type=command_notebook_page_changing}},
{noreply, State#state{active_tab=Pid}}
end;
-handle_event(#wx{event = #wxClose{}}, State) ->
- {stop, normal, State};
-
handle_event(#wx{id = ?ID_CDV, event = #wxCommand{type = command_menu_selected}}, State) ->
spawn(crashdump_viewer, start, []),
{noreply, State};
-handle_event(#wx{id = ?wxID_EXIT, event = #wxCommand{type = command_menu_selected}}, State) ->
+handle_event(#wx{event = #wxClose{}}, #state{log=LogOn} = State) ->
+ LogOn andalso rpc:block_call(State#state.node, rb, stop, []),
+ {stop, normal, State};
+
+handle_event(#wx{id = ?wxID_EXIT, event = #wxCommand{type = command_menu_selected}},
+ #state{log=LogOn} = State) ->
+ LogOn andalso rpc:block_call(State#state.node, rb, stop, []),
{stop, normal, State};
handle_event(#wx{id = ?wxID_HELP, event = #wxCommand{type = command_menu_selected}}, State) ->
@@ -300,12 +305,42 @@ handle_event(#wx{id = ?ID_PING, event = #wxCommand{type = command_menu_selected}
end,
{noreply, UpdState};
-handle_event(#wx{id = Id, event = #wxCommand{type = command_menu_selected}}, State)
- when Id > ?FIRST_NODES_MENU_ID, Id < ?LAST_NODES_MENU_ID ->
+handle_event(#wx{id = ?ID_LOGVIEW, event = #wxCommand{type = command_menu_selected}},
+ #state{frame = Frame, log = PrevLog, node = Node} = State) ->
+ try
+ ok = ensure_sasl_started(Node),
+ ok = ensure_mf_h_handler_used(Node),
+ ok = ensure_rb_mode(Node, PrevLog),
+ case PrevLog of
+ false ->
+ rpc:block_call(Node, rb, start, []),
+ set_status("Observer - " ++ atom_to_list(Node) ++ " (rb_server started)"),
+ {noreply, State#state{log=true}};
+ true ->
+ rpc:block_call(Node, rb, stop, []),
+ set_status("Observer - " ++ atom_to_list(Node) ++ " (rb_server stopped)"),
+ {noreply, State#state{log=false}}
+ end
+ catch
+ throw:Reason ->
+ create_txt_dialog(Frame, Reason, "Log view status", ?wxICON_ERROR),
+ {noreply, State}
+ end;
- Node = lists:nth(Id - ?FIRST_NODES_MENU_ID, State#state.nodes),
- UpdState = change_node_view(Node, State),
- {noreply, UpdState};
+handle_event(#wx{id = Id, event = #wxCommand{type = command_menu_selected}},
+ #state{nodes= Ns , node = PrevNode, log = PrevLog} = State)
+ when Id > ?FIRST_NODES_MENU_ID, Id < ?LAST_NODES_MENU_ID ->
+ Node = lists:nth(Id - ?FIRST_NODES_MENU_ID, Ns),
+ %% Close rb_server only if a node other than the current one is selected
+ LState = case PrevLog of
+ true -> case Node == PrevNode of
+ false -> rpc:block_call(PrevNode, rb, stop, []),
+ State#state{log=false} ;
+ true -> State
+ end;
+ false -> State
+ end,
+ {noreply, change_node_view(Node, LState)};
handle_event(Event, State) ->
Pid = get_active_pid(State),
@@ -340,6 +375,9 @@ handle_call(stop, _, State = #state{frame = Frame}) ->
wxFrame:destroy(Frame),
{stop, normal, ok, State};
+handle_call(log_status, _From, State) ->
+ {reply, State#state.log, State};
+
handle_call(_Msg, _From, State) ->
{reply, ok, State}.
@@ -422,8 +460,7 @@ return_to_localnode(Frame, Node) ->
end.
create_txt_dialog(Frame, Msg, Title, Style) ->
- MD = wxMessageDialog:new(Frame, Msg, [{style, Style}]),
- wxMessageDialog:setTitle(MD, Title),
+ MD = wxMessageDialog:new(Frame, Msg, [{style, Style}, {caption,Title}]),
wxDialog:showModal(MD),
wxDialog:destroy(MD).
@@ -569,17 +606,19 @@ default_menus(NodesMenuItems) ->
false -> {"Nodes", NodesMenuItems ++
[#create_menu{id = ?ID_CONNECT, text = "Enable distribution"}]}
end,
+ LogMenu = {"Log", [#create_menu{id = ?ID_LOGVIEW, text = "Toggle log view"}]},
case os:type() =:= {unix, darwin} of
false ->
FileMenu = {"File", [CDV, Quit]},
HelpMenu = {"Help", [About,Help]},
- [FileMenu, NodeMenu, HelpMenu];
+ [FileMenu, NodeMenu, LogMenu, HelpMenu];
true ->
%% On Mac quit and about will be moved to the "default' place
%% automagicly, so just add them to a menu that always exist.
%% But not to the help menu for some reason
- {Tag, Menus} = FileMenu,
- [{Tag, Menus ++ [About]}, NodeMenu, {"&Help", [Help]}]
+
+ {Tag, Menus} = NodeMenu,
+ [{Tag, Menus ++ [Quit,About]}, LogMenu, {"&Help", [Help]}]
end.
clean_menus(Menus, MenuBar) ->
@@ -658,3 +697,59 @@ update_node_list(State = #state{menubar=MenuBar}) ->
end,
observer_lib:create_menu_item(Dist, NodeMenu, Index),
State#state{nodes = Nodes}.
+
+ensure_sasl_started(Node) ->
+ %% is sasl started ?
+ Apps = rpc:block_call(Node, application, which_applications, []),
+ case lists:keyfind(sasl, 1, Apps) of
+ false -> throw("Error: sasl application not started."),
+ error;
+ {sasl, _, _} -> ok
+ end.
+
+ensure_mf_h_handler_used(Node) ->
+ %% is log_mf_h used ?
+ Handlers = rpc:block_call(Node, gen_event, which_handlers, [error_logger]),
+ case lists:any(fun(L)-> L == log_mf_h end, Handlers) of
+ false -> throw("Error: log_mf_h handler not used in sasl."),
+ error;
+ true -> ok
+ end.
+
+ensure_rb_mode(Node, PrevLog) ->
+ ok = ensure_rb_module_loaded(Node),
+ ok = is_rb_compatible(Node),
+ ok = is_rb_server_running(Node, PrevLog),
+ ok.
+
+
+ensure_rb_module_loaded(Node) ->
+ %% Need to ensure that module is loaded in order to detect exported
+ %% functions on interactive nodes
+ case rpc:block_call(Node, code, ensure_loaded, [rb]) of
+ {badrpc, Reason} ->
+ throw("Error: badrpc - " ++ io_lib:format("~tp",[Reason]));
+ {error, Reason} ->
+ throw("Error: rb module load error - " ++ io_lib:format("~tp",[Reason]));
+ {module,rb} ->
+ ok
+ end.
+
+is_rb_compatible(Node) ->
+ %% Simply test that rb:log_list/0 is exported
+ case rpc:block_call(Node, erlang, function_exported, [rb, log_list, 0]) of
+ false -> throw("Error: Node's Erlang release must be at least R16B02.");
+ true -> ok
+ end.
+
+is_rb_server_running(Node, LogState) ->
+ %% If already started, somebody else may be using it.
+ %% We cannot use it as well, since the log file would be overridden.
+ case rpc:block_call(Node, erlang, whereis, [rb_server]) of
+ Pid when is_pid(Pid), (LogState == false) ->
+ throw("Error: rb_server is already started and maybe used by someone.");
+ Pid when is_pid(Pid) ->
+ ok;
+ undefined ->
+ ok
+ end.
diff --git a/lib/observer/test/observer_SUITE.erl b/lib/observer/test/observer_SUITE.erl
index 5cf719acb1..c69fdf4bdf 100644
--- a/lib/observer/test/observer_SUITE.erl
+++ b/lib/observer/test/observer_SUITE.erl
@@ -22,6 +22,8 @@
-include_lib("wx/include/wx.hrl").
-include_lib("observer/src/observer_tv.hrl").
+-define(ID_LOGVIEW, 5).
+
%% Test server specific exports
-export([all/0, suite/0,groups/0]).
-export([init_per_testcase/2, end_per_testcase/2,
@@ -44,8 +46,9 @@ all() ->
groups() ->
[{gui, [],
- [basic
- , process_win, table_win
+ [basic,
+ process_win,
+ table_win
]
}].
@@ -107,7 +110,7 @@ appup_file(Config) when is_list(Config) ->
basic(suite) -> [];
basic(doc) -> [""];
basic(Config) when is_list(Config) ->
- timer:send_after(100, "foobar"), %% Otherwise the timer sever gets added to procs
+ timer:send_after(100, "foobar"), %% Otherwise the timer server gets added to procs
ProcsBefore = processes(),
NumProcsBefore = length(ProcsBefore),
@@ -126,7 +129,7 @@ basic(Config) when is_list(Config) ->
timer:sleep(200),
ok = wxNotebook:advanceSelection(Notebook)
end,
- %% Just verify that we can toogle trough all pages
+ %% Just verify that we can toggle through all pages
[_|_] = [Check(N, false) || N <- lists:seq(1, Count)],
%% Cause it to resize
Frame = get_top_level_parent(Notebook),
@@ -214,10 +217,27 @@ test_page(Title, Window) ->
process_win(suite) -> [];
process_win(doc) -> [""];
process_win(Config) when is_list(Config) ->
+ % Stop SASL if already started
+ SaslStart = case whereis(sasl_sup) of
+ undefined -> false;
+ _ -> application:stop(sasl),
+ true
+ end,
+ % Define custom sasl and log_mf_h app vars
+ Privdir=?config(priv_dir,Config),
+ application:set_env(sasl, sasl_error_logger, tty),
+ application:set_env(sasl, error_logger_mf_dir, Privdir),
+ application:set_env(sasl, error_logger_mf_maxbytes, 1000),
+ application:set_env(sasl, error_logger_mf_maxfiles, 5),
+ application:start(sasl),
ok = observer:start(),
ObserverNB = setup_whitebox_testing(),
Parent = get_top_level_parent(ObserverNB),
- Frame = observer_procinfo:start(self(), Parent, self()),
+ % Activate log view
+ whereis(observer) ! #wx{id = ?ID_LOGVIEW, event = #wxCommand{type = command_menu_selected}},
+ timer:sleep(1000),
+ % Process window tests (use sasl_sup for a non-empty Log tab)
+ Frame = observer_procinfo:start(whereis(sasl_sup), Parent, self()),
PIPid = wx_object:get_pid(Frame),
PIPid ! {get_debug_info, self()},
Notebook = receive {procinfo_debug, NB} -> NB end,
@@ -229,6 +249,11 @@ process_win(Config) when is_list(Config) ->
[_|_] = [Check(N) || N <- lists:seq(1, Count)],
PIPid ! #wx{event=#wxClose{type=close_window}},
observer:stop(),
+ application:stop(sasl),
+ case SaslStart of
+ true -> application:start(sasl);
+ false -> ok
+ end,
ok.
table_win(suite) -> [];
diff --git a/lib/os_mon/c_src/memsup.c b/lib/os_mon/c_src/memsup.c
index 409db84aa7..5dcab07dd8 100644
--- a/lib/os_mon/c_src/memsup.c
+++ b/lib/os_mon/c_src/memsup.c
@@ -104,7 +104,7 @@
#if !defined (__OpenBSD__) && !defined (__NetBSD__)
#include <vm/vm_param.h>
#endif
-#if defined (__FreeBSD__) || defined(__DragonFly__) || defined (__NetBSD__)
+#if defined (__FreeBSD__) || defined(__DragonFly__) || defined (__NetBSD__) || defined(__OpenBSD__)
#include <sys/vmmeter.h>
#endif
#endif
diff --git a/lib/parsetools/include/leexinc.hrl b/lib/parsetools/include/leexinc.hrl
index 938aef58f9..2657fdcfaa 100644
--- a/lib/parsetools/include/leexinc.hrl
+++ b/lib/parsetools/include/leexinc.hrl
@@ -44,6 +44,8 @@ string(Ics0, L0, Tcs, Ts) ->
%% Test for and remove the end token wrapper. Push back characters
%% are prepended to RestChars.
+-dialyzer({nowarn_function, string_cont/4}).
+
string_cont(Rest, Line, {token,T}, Ts) ->
string(Rest, Line, Rest, [T|Ts]);
string_cont(Rest, Line, {token,T,Push}, Ts) ->
@@ -113,6 +115,8 @@ token(S0, Ics0, L0, Tcs, Tlen0, Tline, A0, Alen0) ->
%% If we have a token or error then return done, else if we have a
%% skip_token then continue.
+-dialyzer({nowarn_function, token_cont/3}).
+
token_cont(Rest, Line, {token,T}) ->
{done,{ok,T,Line},Rest};
token_cont(Rest, Line, {token,T,Push}) ->
@@ -187,6 +191,8 @@ tokens(S0, Ics0, L0, Tcs, Tlen0, Tline, Ts, A0, Alen0) ->
%% a token then save it and continue, else if we have a skip_token
%% just continue.
+-dialyzer({nowarn_function, tokens_cont/4}).
+
tokens_cont(Rest, Line, {token,T}, Ts) ->
tokens(yystate(), Rest, Line, Rest, 0, Line, [T|Ts], reject, 0);
tokens_cont(Rest, Line, {token,T,Push}, Ts) ->
@@ -238,6 +244,8 @@ skip_tokens(S0, Ics0, L0, Tcs, Tlen0, Tline, Error, A0, Alen0) ->
%% Skip tokens until we have an end_token or error then return done
%% with the original rror.
+-dialyzer({nowarn_function, skip_cont/4}).
+
skip_cont(Rest, Line, {token,_T}, Error) ->
skip_tokens(yystate(), Rest, Line, Rest, 0, Line, Error, reject, 0);
skip_cont(Rest, Line, {token,_T,Push}, Error) ->
diff --git a/lib/parsetools/src/yecc.erl b/lib/parsetools/src/yecc.erl
index f4657663e6..3fcec73ce2 100644
--- a/lib/parsetools/src/yecc.erl
+++ b/lib/parsetools/src/yecc.erl
@@ -2064,11 +2064,13 @@ output_actions(St0, StateJumps, StateInfo) ->
SelS = [{State,Called} ||
{{State,_JActions}, {State,Called}} <-
lists:zip(StateJumps, lists:keysort(1, Sel))],
+ St05 =
+ fwrite(St0, <<"-dialyzer({nowarn_function, yeccpars2/7}).\n">>, []),
St10 = foldl(fun({State, Called}, St_0) ->
{State, #state_info{state_repr = IState}} =
lookup_state(StateInfo, State),
output_state_selection(St_0, State, IState, Called)
- end, St0, SelS),
+ end, St05, SelS),
St20 = fwrite(St10, <<"yeccpars2(Other, _, _, _, _, _, _) ->\n">>, []),
St = fwrite(St20,
?YECC_BUG(<<"{missing_state_in_action_table, Other}">>, []),
@@ -2089,7 +2091,8 @@ output_state_selection(St0, State, IState, Called) ->
[Comment, IState]).
output_state_actions(St, State, State, {Actions,jump_none}, SI) ->
- output_state_actions1(St, State, Actions, true, normal, SI);
+ St1 = output_state_actions_begin(St, State, Actions),
+ output_state_actions1(St1, State, Actions, true, normal, SI);
output_state_actions(St0, State, State, {Actions, Jump}, SI) ->
{Tag, To, Common} = Jump,
CS = case Tag of
@@ -2099,13 +2102,22 @@ output_state_actions(St0, State, State, {Actions, Jump}, SI) ->
St = output_state_actions1(St0, State, Actions, true, {to, CS}, SI),
if
To =:= State ->
- output_state_actions1(St, CS, Common, true, normal, SI);
+ St1 = output_state_actions_begin(St, State, Actions),
+ output_state_actions1(St1, CS, Common, true, normal, SI);
true ->
St
end;
output_state_actions(St, State, JState, _XActions, _SI) ->
fwrite(St, <<"%% yeccpars2_~w: see yeccpars2_~w\n\n">>, [State, JState]).
+output_state_actions_begin(St, State, Actions) ->
+ case [yes || {_, #reduce{}} <- Actions] of
+ [] ->
+ fwrite(St, <<"-dialyzer({nowarn_function, yeccpars2_~w/7}).\n">>,
+ [State]); % Only when yeccerror(T) is output.
+ _ -> St
+ end.
+
output_state_actions1(St, State, [], IsFirst, normal, _SI) ->
output_state_actions_fini(State, IsFirst, St);
output_state_actions1(St0, State, [], IsFirst, {to, ToS}, _SI) ->
diff --git a/lib/parsetools/test/yecc_SUITE.erl b/lib/parsetools/test/yecc_SUITE.erl
index d308d21f82..c18dc15e37 100644
--- a/lib/parsetools/test/yecc_SUITE.erl
+++ b/lib/parsetools/test/yecc_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2005-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2005-2014. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -340,8 +340,8 @@ syntax(Config) when is_list(Config) ->
{_,[{L1,_,{undefined_function,{yeccpars2_2_,1}}},
{L2,_,{bad_inline,{yeccpars2_2_,1}}}]}],
[]} = compile:file(Parserfile1, [basic_validation,return]),
- ?line L1 = 28 + SzYeccPre,
- ?line L2 = 35 + SzYeccPre
+ ?line L1 = 31 + SzYeccPre,
+ ?line L2 = 38 + SzYeccPre
end(),
%% Bad macro in action. OTP-7224.
@@ -358,8 +358,8 @@ syntax(Config) when is_list(Config) ->
{_,[{L1,_,{undefined_function,{yeccpars2_2_,1}}},
{L2,_,{bad_inline,{yeccpars2_2_,1}}}]}],
[]} = compile:file(Parserfile1, [basic_validation,return]),
- ?line L1 = 28 + SzYeccPre,
- ?line L2 = 35 + SzYeccPre
+ ?line L1 = 31 + SzYeccPre,
+ ?line L2 = 38 + SzYeccPre
end(),
%% Check line numbers. OTP-7224.
@@ -1619,8 +1619,8 @@ otp_7292(Config) when is_list(Config) ->
{L2,_,{bad_inline,{yeccpars2_2_,1}}}]}],
[{_,[{16,_,{unused_function,{foo,0}}}]}]} =
compile:file(Parserfile1, [basic_validation, return]),
- ?line L1 = 38 + SzYeccPre,
- ?line L2 = 45 + SzYeccPre
+ L1 = 41 + SzYeccPre,
+ L2 = 48 + SzYeccPre
end(),
YeccPre = filename:join(Dir, "yeccpre.hrl"),
@@ -1637,8 +1637,8 @@ otp_7292(Config) when is_list(Config) ->
{L2,_,{bad_inline,{yeccpars2_2_,1}}}]}],
[{_,[{16,_,{unused_function,{foo,0}}}]}]} =
compile:file(Parserfile1, [basic_validation, return]),
- ?line L1 = 37 + SzYeccPre,
- ?line L2 = 44 + SzYeccPre
+ ?line L1 = 40 + SzYeccPre,
+ ?line L2 = 47 + SzYeccPre
end(),
file:delete(YeccPre),
diff --git a/lib/sasl/test/release_handler_SUITE.erl b/lib/sasl/test/release_handler_SUITE.erl
index bd7414fbb4..b7c5f34f58 100644
--- a/lib/sasl/test/release_handler_SUITE.erl
+++ b/lib/sasl/test/release_handler_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2011-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2011-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -1802,11 +1802,17 @@ upgrade_gg(cleanup,Config) ->
%%%-----------------------------------------------------------------
%%% OTP-10463, Bug - release_handler could not handle regexp in appup
%%% files.
-otp_10463_upgrade_script_regexp(_Config) ->
- %% Assuming that kernel always has a regexp in it's appup
- KernelVsn = vsn(kernel,current),
- {ok,KernelVsn,_} =
- release_handler:upgrade_script(kernel,code:lib_dir(kernel)),
+otp_10463_upgrade_script_regexp(Config) ->
+ DataDir = ?config(data_dir,Config),
+ code:add_path(filename:join([DataDir,regexp_appup,app1,ebin])),
+ application:start(app1),
+ {ok,"1.1",_} = release_handler:upgrade_script(app1,code:lib_dir(app1)),
+ ok.
+
+otp_10463_upgrade_script_regexp(cleanup,Config) ->
+ DataDir = ?config(data_dir,Config),
+ application:stop(app1),
+ code:del_path(filename:join([DataDir,regexp_appup,app1,ebin])),
ok.
no_dot_erlang(Conf) ->
diff --git a/lib/sasl/test/release_handler_SUITE_data/regexp_appup/app1/ebin/app1.app b/lib/sasl/test/release_handler_SUITE_data/regexp_appup/app1/ebin/app1.app
new file mode 100644
index 0000000000..ba6d09cd42
--- /dev/null
+++ b/lib/sasl/test/release_handler_SUITE_data/regexp_appup/app1/ebin/app1.app
@@ -0,0 +1,29 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%% This is an -*- erlang -*- file.
+%%
+{application, app1,
+ [
+ {description, "Test that release_handler can read appup with regexp"},
+ {vsn, "1.1"},
+ {modules, []},
+ {registered, []},
+ {applications, []}
+ ]
+}.
diff --git a/lib/sasl/test/release_handler_SUITE_data/regexp_appup/app1/ebin/app1.appup b/lib/sasl/test/release_handler_SUITE_data/regexp_appup/app1/ebin/app1.appup
new file mode 100644
index 0000000000..9c657232d0
--- /dev/null
+++ b/lib/sasl/test/release_handler_SUITE_data/regexp_appup/app1/ebin/app1.appup
@@ -0,0 +1,23 @@
+%% -*- erlang -*-
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+{"1.1",
+ %% Up from
+ [{<<"1(\\.[0-9]+)*">>,[{restart_application,app1}]}],
+ %% Down to
+ [{<<"1(\\.[0-9]+)*">>,[{restart_application,app1}]}]
+}.
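%% Editorial sketch (not part of the patch): the binary key in the appup above
%% is treated as a regular expression, so every 1.x(.y) version of app1 selects
%% the same restart_application instruction. Anchors are added here only for
%% the illustration; release_handler applies its own matching rules.
{match, _} = re:run("1.0.7", <<"^1(\\.[0-9]+)*$">>),
nomatch    = re:run("2.0",   <<"^1(\\.[0-9]+)*$">>).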
diff --git a/lib/ssh/doc/src/ssh.xml b/lib/ssh/doc/src/ssh.xml
index 9f5d1c003d..d481a75c9a 100644
--- a/lib/ssh/doc/src/ssh.xml
+++ b/lib/ssh/doc/src/ssh.xml
@@ -234,11 +234,11 @@
<taglist>
<tag><c><![CDATA[{inet, inet | inet6}]]></c></tag>
<item> IP version to use when the host address is specified as <c>any</c>. </item>
- <tag><c><![CDATA[{subsystems, [subsystem_spec()]]]></c></tag>
+ <tag><c><![CDATA[{subsystems, [subsystem_spec()]}]]></c></tag>
<item>
Provides specifications for handling of subsystems. The
"sftp" subsystem spec can be retrieved by calling
- ssh_sftpd:subsystem_spec/1. If the subsystems option in
+ ssh_sftpd:subsystem_spec/1. If the subsystems option is
not present the value of
<c>[ssh_sftpd:subsystem_spec([])]</c> will be used. It is
of course possible to set the option to the empty list if
diff --git a/lib/ssh/doc/src/using_ssh.xml b/lib/ssh/doc/src/using_ssh.xml
index 9ab71260d3..46178d4018 100644
--- a/lib/ssh/doc/src/using_ssh.xml
+++ b/lib/ssh/doc/src/using_ssh.xml
@@ -79,7 +79,7 @@
<p> The option user_dir defaults to the users ~/.ssh directory</p>
<p>In the following example we generate new keys and host keys as
- to be able to run the example without having root privilages</p>
+ to be able to run the example without having root privileges</p>
<code>
$bash> ssh-keygen -t rsa -f /tmp/ssh_daemon/ssh_host_rsa_key
diff --git a/lib/ssh/src/ssh_connection_handler.erl b/lib/ssh/src/ssh_connection_handler.erl
index 915060c426..68523aa72b 100644
--- a/lib/ssh/src/ssh_connection_handler.erl
+++ b/lib/ssh/src/ssh_connection_handler.erl
@@ -1711,7 +1711,7 @@ handshake(Pid, Ref, Timeout) ->
{error, Reason}
after Timeout ->
stop(Pid),
- {error, Timeout}
+ {error, timeout}
end.
start_timeout(_,_, infinity) ->
diff --git a/lib/ssh/src/ssh_sftpd.erl b/lib/ssh/src/ssh_sftpd.erl
index 52665635f0..04ae6b11e2 100644
--- a/lib/ssh/src/ssh_sftpd.erl
+++ b/lib/ssh/src/ssh_sftpd.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2005-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2005-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -559,56 +559,73 @@ stat(ReqId, RelPath, State0=#state{file_handler=FileMod,
send_status({error, E}, ReqId, State1)
end.
-decode_4_open_flag(create_new) ->
- [write];
-decode_4_open_flag(create_truncate) ->
- [write];
-decode_4_open_flag(truncate_existing) ->
- [write];
-decode_4_open_flag(open_existing) ->
- [read].
-
-decode_4_flags([OpenFlag | Flags]) ->
- decode_4_flags(Flags, decode_4_open_flag(OpenFlag)).
-
-decode_4_flags([], Flags) ->
- Flags;
-decode_4_flags([append_data|R], _Flags) ->
- decode_4_flags(R, [append]);
-decode_4_flags([append_data_atomic|R], _Flags) ->
- decode_4_flags(R, [append]);
-decode_4_flags([_|R], Flags) ->
- decode_4_flags(R, Flags).
-
-decode_4_access_flag(read_data) ->
- [read];
-decode_4_access_flag(list_directory) ->
- [read];
-decode_4_access_flag(write_data) ->
- [write];
-decode_4_access_flag(add_file) ->
- [write];
-decode_4_access_flag(add_subdirectory) ->
- [read];
-decode_4_access_flag(append_data) ->
- [append];
-decode_4_access_flag(write_attributes) ->
- [write];
-decode_4_access_flag(_) ->
- [read].
-
-decode_4_acess([_ | _] = Flags) ->
+sftp_to_erlang_flag(read, Vsn) when Vsn == 3;
+ Vsn == 4 ->
+ read;
+sftp_to_erlang_flag(write, Vsn) when Vsn == 3;
+ Vsn == 4 ->
+ write;
+sftp_to_erlang_flag(append, Vsn) when Vsn == 3;
+ Vsn == 4 ->
+ append;
+sftp_to_erlang_flag(creat, Vsn) when Vsn == 3;
+ Vsn == 4 ->
+ write;
+sftp_to_erlang_flag(trunc, Vsn) when Vsn == 3;
+ Vsn == 4 ->
+ write;
+sftp_to_erlang_flag(excl, Vsn) when Vsn == 3;
+ Vsn == 4 ->
+ read;
+sftp_to_erlang_flag(create_new, Vsn) when Vsn > 4 ->
+ write;
+sftp_to_erlang_flag(create_truncate, Vsn) when Vsn > 4 ->
+ write;
+sftp_to_erlang_flag(open_existing, Vsn) when Vsn > 4 ->
+ read;
+sftp_to_erlang_flag(open_or_create, Vsn) when Vsn > 4 ->
+ write;
+sftp_to_erlang_flag(truncate_existing, Vsn) when Vsn > 4 ->
+ write;
+sftp_to_erlang_flag(append_data, Vsn) when Vsn > 4 ->
+ append;
+sftp_to_erlang_flag(append_data_atomic, Vsn) when Vsn > 4 ->
+ append;
+sftp_to_erlang_flag(_, _) ->
+ read.
+
+sftp_to_erlang_flags(Flags, Vsn) ->
lists:map(fun(Flag) ->
- [decode_4_access_flag(Flag)]
- end, Flags);
-decode_4_acess([]) ->
- [].
+ sftp_to_erlang_flag(Flag, Vsn)
+ end, Flags).
+
+sftp_to_erlang_access_flag(read_data, _) ->
+ read;
+sftp_to_erlang_access_flag(list_directory, _) ->
+ read;
+sftp_to_erlang_access_flag(write_data, _) ->
+ write;
+sftp_to_erlang_access_flag(append_data, _) ->
+ append;
+sftp_to_erlang_access_flag(add_subdirectory, _) ->
+ read;
+sftp_to_erlang_access_flag(add_file, _) ->
+ write;
+sftp_to_erlang_access_flag(write_attributes, _) ->
+ write;
+sftp_to_erlang_access_flag(_, _) ->
+ read.
+sftp_to_erlang_access_flags(Flags, Vsn) ->
+ lists:map(fun(Flag) ->
+ sftp_to_erlang_access_flag(Flag, Vsn)
+ end, Flags).
open(Vsn, ReqId, Data, State) when Vsn =< 3 ->
<<?UINT32(BLen), BPath:BLen/binary, ?UINT32(PFlags),
_Attrs/binary>> = Data,
Path = unicode:characters_to_list(BPath),
- Flags = ssh_xfer:decode_open_flags(Vsn, PFlags),
+ FlagBits = ssh_xfer:decode_open_flags(Vsn, PFlags),
+ Flags = lists:usort(sftp_to_erlang_flags(FlagBits, Vsn)),
do_open(ReqId, State, Path, Flags);
open(Vsn, ReqId, Data, State) when Vsn >= 4 ->
<<?UINT32(BLen), BPath:BLen/binary, ?UINT32(Access),
@@ -616,15 +633,12 @@ open(Vsn, ReqId, Data, State) when Vsn >= 4 ->
Path = unicode:characters_to_list(BPath),
FlagBits = ssh_xfer:decode_open_flags(Vsn, PFlags),
AcessBits = ssh_xfer:decode_ace_mask(Access),
- %% TODO: This is to make sure the Access flags are not ignored
- %% but this should be thought through better. This solution should
- %% be considered a hack in order to buy some time. At least
- %% it works better than when the Access flags where totally ignored.
- %% A better solution may need some code refactoring that we do
- %% not have time for right now.
- AcessFlags = decode_4_acess(AcessBits),
- Flags = lists:append(lists:umerge(
- [[decode_4_flags(FlagBits)] | AcessFlags])),
+ %% TODO: There are still flags that are not fully handled,
+ %% such as SSH_FXF_ACCESS_TEXT_MODE and a lot of the ACE flags;
+ %% the latter we may not need to understand as they are NFS flags.
+ AcessFlags = sftp_to_erlang_access_flags(AcessBits, Vsn),
+ Flags = lists:usort(sftp_to_erlang_flags(FlagBits, Vsn) ++ AcessFlags),
do_open(ReqId, State, Path, Flags).
do_open(ReqId, State0, Path, Flags) ->
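%% Editorial sketch (not part of the patch; the helpers above are module
%% internals, shown only to document the intended mapping): an SFTPv3 open
%% whose flag bits decode to read, creat and trunc collapses to a minimal
%% file-mode list after the usort in open/4.
[read, write] = lists:usort(sftp_to_erlang_flags([read, creat, trunc], 3)).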
diff --git a/lib/ssh/test/ssh_basic_SUITE.erl b/lib/ssh/test/ssh_basic_SUITE.erl
index 415cb9fc9c..cb1b4ae945 100644
--- a/lib/ssh/test/ssh_basic_SUITE.erl
+++ b/lib/ssh/test/ssh_basic_SUITE.erl
@@ -723,7 +723,7 @@ ssh_connect_arg4_timeout(_Config) ->
%% Wait for client reaction on the connection try:
receive
- {done, Client, {error,_E}, T0} ->
+ {done, Client, {error,timeout}, T0} ->
Msp = ms_passed(T0, now()),
exit(Server,hasta_la_vista___baby),
Low = 0.9*Timeout,
@@ -733,6 +733,11 @@ ssh_connect_arg4_timeout(_Config) ->
Low<Msp, Msp<High -> ok;
true -> {fail, "timeout not within limits"}
end;
+
+ {done, Client, {error,Other}, _T0} ->
+ ct:log("Error message \"~p\" from the client is unexpected.",[{error,Other}]),
+ {fail, "Unexpected error message"};
+
{done, Client, {ok,_Ref}, _T0} ->
{fail,"ssh-connected ???"}
after
diff --git a/lib/ssh/test/ssh_sftpd_SUITE.erl b/lib/ssh/test/ssh_sftpd_SUITE.erl
index 7b22e45d5e..0ce8eec906 100644
--- a/lib/ssh/test/ssh_sftpd_SUITE.erl
+++ b/lib/ssh/test/ssh_sftpd_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -56,7 +56,8 @@ all() ->
retrieve_attributes,
set_attributes,
links,
- ver3_rename,
+ ver3_rename,
+ ver3_open_flags,
relpath,
sshd_read_file,
ver6_basic].
@@ -193,6 +194,39 @@ open_close_file(Config) when is_list(Config) ->
?ACE4_READ_DATA bor ?ACE4_READ_ATTRIBUTES,
?SSH_FXF_OPEN_EXISTING).
+ver3_open_flags() ->
+ [{doc, "Test open flags"}].
+ver3_open_flags(Config) when is_list(Config) ->
+ PrivDir = ?config(priv_dir, Config),
+ FileName = filename:join(PrivDir, "not_exist.txt"),
+ {Cm, Channel} = ?config(sftp, Config),
+ ReqId = 0,
+
+ {ok, <<?SSH_FXP_HANDLE, ?UINT32(ReqId), Handle/binary>>, _} =
+ open_file_v3(FileName, Cm, Channel, ReqId,
+ ?SSH_FXF_CREAT bor ?SSH_FXF_TRUNC),
+ {ok, <<?SSH_FXP_STATUS, ?UINT32(ReqId),
+ ?UINT32(?SSH_FX_OK), _/binary>>, _} = close(Handle, ReqId,
+ Cm, Channel),
+
+ NewFileName = filename:join(PrivDir, "not_exist2.txt"),
+ NewReqId = ReqId + 1,
+ {ok, <<?SSH_FXP_HANDLE, ?UINT32(NewReqId), NewHandle/binary>>, _} =
+ open_file_v3(NewFileName, Cm, Channel, NewReqId,
+ ?SSH_FXF_CREAT bor ?SSH_FXF_EXCL),
+ {ok, <<?SSH_FXP_STATUS, ?UINT32(NewReqId),
+ ?UINT32(?SSH_FX_OK), _/binary>>, _} = close(NewHandle, NewReqId,
+ Cm, Channel),
+
+ NewFileName1 = filename:join(PrivDir, "test.txt"),
+ NewReqId1 = NewReqId + 1,
+ {ok, <<?SSH_FXP_HANDLE, ?UINT32(NewReqId1), NewHandle1/binary>>, _} =
+ open_file_v3(NewFileName1, Cm, Channel, NewReqId1,
+ ?SSH_FXF_READ bor ?SSH_FXF_WRITE bor ?SSH_FXF_APPEND),
+ {ok, <<?SSH_FXP_STATUS, ?UINT32(NewReqId1),
+ ?UINT32(?SSH_FX_OK), _/binary>>, _} = close(NewHandle1, NewReqId1,
+ Cm, Channel).
+
%%--------------------------------------------------------------------
open_close_dir() ->
[{doc,"Test SSH_FXP_OPENDIR and SSH_FXP_CLOSE commands"}].
@@ -662,6 +696,16 @@ open_file(File, Cm, Channel, ReqId, Access, Flags) ->
?SSH_FXP_OPEN, Data/binary>>),
reply(Cm, Channel).
+open_file_v3(File, Cm, Channel, ReqId, Flags) ->
+
+ Data = list_to_binary([?uint32(ReqId),
+ ?binary(list_to_binary(File)),
+ ?uint32(Flags),
+ ?REG_ATTERS]),
+ Size = 1 + size(Data),
+ ssh_connection:send(Cm, Channel, <<?UINT32(Size),
+ ?SSH_FXP_OPEN, Data/binary>>),
+ reply(Cm, Channel).
close(Handle, ReqId, Cm , Channel) ->
diff --git a/lib/ssl/doc/src/ssl.xml b/lib/ssl/doc/src/ssl.xml
index 83e5ed82bb..9e6d294f09 100644
--- a/lib/ssl/doc/src/ssl.xml
+++ b/lib/ssl/doc/src/ssl.xml
@@ -4,7 +4,7 @@
<erlref>
<header>
<copyright>
- <year>1999</year><year>2014</year>
+ <year>1999</year><year>2015</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -38,7 +38,9 @@
<item>ssl requires the crypto and public_key applications.</item>
<item>Supported SSL/TLS-versions are SSL-3.0, TLS-1.0,
TLS-1.1 and TLS-1.2.</item>
- <item>For security reasons sslv2 is not supported.</item>
+ <item>For security reasons SSL-2.0 is not supported.</item>
+ <item>For security reasons SSL-3.0 is no longer supported by default,
+ but can be enabled by configuration.</item>
<item>Ephemeral Diffie-Hellman cipher suites are supported
but not Diffie Hellman Certificates cipher suites.</item>
<item>Elliptic Curve cipher suites are supported if crypto
@@ -163,7 +165,7 @@
is supplied it will override the certfile option.</item>
<tag>{certfile, path()}</tag>
- <item>Path to a file containing the user's certificate.</item>
+ <item>Path to a file containing the user's PEM encoded certificate.</item>
<tag>{key, {'RSAPrivateKey'| 'DSAPrivateKey' | 'ECPrivateKey' |'PrivateKeyInfo', der_encoded()}}</tag>
<item> The DER encoded users private key. If this option
@@ -302,7 +304,7 @@ fun(OtpCert :: #'OTPCertificate'{}, Event :: {bad_cert, Reason :: atom() | {revo
</item>
- <tag>{partial_chain, fun(Chain::[DerCert]) -> {trusted_ca, DerCert} | unknown_ca </tag>
+ <tag>{partial_chain, fun(Chain::[DerCert]) -> {trusted_ca, DerCert} | unknown_ca }</tag>
<item>
Claim an intermediat CA in the chain as trusted. TLS will then perform the public_key:pkix_path_validation/3
with the selected CA as trusted anchor and the rest of the chain.
@@ -311,7 +313,7 @@ fun(OtpCert :: #'OTPCertificate'{}, Event :: {bad_cert, Reason :: atom() | {revo
<tag>{versions, [protocol()]}</tag>
<item>TLS protocol versions that will be supported by started clients and servers.
This option overrides the application environment option <c>protocol_version</c>. If the
- environment option is not set it defaults to all versions supported by the SSL application. See also
+ environment option is not set, it defaults to all versions supported by the SSL application, except SSL-3.0. See also
<seealso marker="ssl:ssl_app">ssl(6)</seealso>
</item>
@@ -348,11 +350,23 @@ fun(srp, Username :: string(), UserState :: term()) ->
</p>
</item>
+ <tag>{padding_check, boolean()}</tag>
+ <item>
+ <p> This option only affects TLS-1.0 connections.
+ If set to false it disables the block cipher padding check
+ to be able to interoperate with legacy software.
+ </p>
+
+ <warning><p> Using this option makes TLS vulnerable to
+ the Poodle attack.</p></warning>
+
+ </item>
+
</taglist>
-
+
</section>
-
- <section>
+
+ <section>
<title>SSL OPTION DESCRIPTIONS - CLIENT SIDE</title>
<p>Options described here are client specific or has a slightly different
@@ -538,7 +552,19 @@ fun(srp, Username :: string(), UserState :: term()) ->
</p>
</desc>
</func>
-
+
+ <func>
+ <name>clear_pem_cache() -> ok </name>
+ <fsummary> Clears the pem cache</fsummary>
+
+ <desc><p>PEM files, used by ssl API functions, are cached. The
+ cache is regularly checked to see if any cache entries should be
+ invalidated. However, this function provides a way to
+ unconditionally clear the whole cache.
+ </p>
+ </desc>
+ </func>
+
<func>
<name>connect(Socket, SslOptions) -> </name>
<name>connect(Socket, SslOptions, Timeout) -> {ok, SslSocket}
@@ -904,19 +930,37 @@ fun(srp, Username :: string(), UserState :: term()) ->
</func>
<func>
- <name>versions() ->
- [{SslAppVer, SupportedSslVer, AvailableSslVsn}]</name>
+ <name>versions() -> [versions_info()]</name>
<fsummary>Returns version information relevant for the
ssl application.</fsummary>
<type>
- <v>SslAppVer = string()</v>
- <v>SupportedSslVer = [protocol()]</v>
- <v>AvailableSslVsn = [protocol()]</v>
+ <v>versions_info() = {app_vsn, string()} | {supported | available, [protocol()]}</v>
</type>
<desc>
<p>
Returns version information relevant for the
- ssl application.</p>
+ ssl application.
+ </p>
+ <taglist>
+ <tag>app_vsn</tag>
+ <item> The application version of the OTP ssl application.</item>
+
+ <tag>supported</tag>
+
+ <item>TLS/SSL versions supported by default.
+ Overridden by a versions option on
+ <seealso marker="#connect-2"> connect/[2,3,4]</seealso>, <seealso
+ marker="#listen-2"> listen/2</seealso> and <seealso
+ marker="#ssl_accept-2">ssl_accept/[1,2,3]</seealso>. For the
+ negotiated TLS/SSL version see <seealso
+ marker="#connection_info-1">ssl:connection_info/1
+ </seealso></item>
+
+ <tag>available</tag>
+ <item>All TLS/SSL versions that the Erlang ssl application
+ can support. Note that TLS 1.2 requires sufficient support
+ from the crypto application. </item>
+ </taglist>
</desc>
</func>
<func>
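%% Editorial sketch (not part of the patch; host and port are made up): the
%% options documented above in use - TLS-1.0 only with the padding check
%% disabled for a legacy peer, plus the new versions()/clear_pem_cache() calls.
{ok, _Sock} = ssl:connect("legacy.example.com", 443,
                          [{versions, [tlsv1]}, {padding_check, false}]),
[{ssl_app, _}, {supported, _}, {available, _}] = ssl:versions(),
ok = ssl:clear_pem_cache().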
diff --git a/lib/ssl/doc/src/ssl_app.xml b/lib/ssl/doc/src/ssl_app.xml
index c8024548b5..e3a3fc27f2 100644
--- a/lib/ssl/doc/src/ssl_app.xml
+++ b/lib/ssl/doc/src/ssl_app.xml
@@ -4,7 +4,7 @@
<appref>
<header>
<copyright>
- <year>1999</year><year>2014</year>
+ <year>1999</year><year>2015</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -82,7 +82,16 @@
callback module, defaults to [].
</p>
</item>
-
+
+ <tag><c><![CDATA[ssl_pem_cache_clean = integer() <optional>]]></c></tag>
+ <item>
+ <p>
+ Number of milliseconds between PEM cache validations.
+ </p>
+ <seealso
+ marker="ssl#clear_pem_cache-0">ssl:clear_pem_cache/0</seealso>
+
+ </item>
</taglist>
</section>
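%% Editorial sketch (not part of the patch; the interval is an example value
%% in milliseconds): tuning the new ssl_pem_cache_clean parameter, either at
%% runtime or via the equivalent sys.config entry.
ok = application:set_env(ssl, ssl_pem_cache_clean, 60000).
%% sys.config equivalent: [{ssl, [{ssl_pem_cache_clean, 60000}]}].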
diff --git a/lib/ssl/src/dtls_record.erl b/lib/ssl/src/dtls_record.erl
index c0776e822b..59b3ddec5c 100644
--- a/lib/ssl/src/dtls_record.erl
+++ b/lib/ssl/src/dtls_record.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2013-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2013-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -194,7 +194,7 @@ decode_cipher_text(#ssl_tls{type = Type, version = Version,
compression_algorithm=CompAlg}
} = ReadState0}= ConnnectionStates0) ->
{PlainFragment, Mac, ReadState1} = ssl_record:decipher(dtls_v1:corresponding_tls_version(Version),
- CipherFragment, ReadState0),
+ CipherFragment, ReadState0, true),
MacHash = calc_mac_hash(ReadState1, Type, Version, Epoch, Seq, PlainFragment),
case ssl_record:is_correct_mac(Mac, MacHash) of
true ->
diff --git a/lib/ssl/src/ssl.appup.src b/lib/ssl/src/ssl.appup.src
index 7986722094..1476336039 100644
--- a/lib/ssl/src/ssl.appup.src
+++ b/lib/ssl/src/ssl.appup.src
@@ -1,14 +1,14 @@
%% -*- erlang -*-
{"%VSN%",
[
- {<<"5\\.3\\.[1-7]($|\\..*)">>, [{restart_application, ssl}]},
- {<<"5\\.[0-2]($|\\..*)">>, [{restart_application, ssl}]},
+ {<<"6\\..*">>, [{restart_application, ssl}]},
+ {<<"5\\..*">>, [{restart_application, ssl}]},
{<<"4\\..*">>, [{restart_application, ssl}]},
{<<"3\\..*">>, [{restart_application, ssl}]}
],
[
- {<<"5\\.3\\.[1-7]($|\\..*)">>, [{restart_application, ssl}]},
- {<<"5\\.[0-2]($|\\..*)">>, [{restart_application, ssl}]},
+ {<<"6\\..*">>, [{restart_application, ssl}]},
+ {<<"5\\..*">>, [{restart_application, ssl}]},
{<<"4\\..*">>, [{restart_application, ssl}]},
{<<"3\\..*">>, [{restart_application, ssl}]}
]
diff --git a/lib/ssl/src/ssl.erl b/lib/ssl/src/ssl.erl
index dcba69a65e..ab26b6abc4 100644
--- a/lib/ssl/src/ssl.erl
+++ b/lib/ssl/src/ssl.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1999-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1999-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -353,12 +353,8 @@ cipher_suites(openssl) ->
|| S <- ssl_cipher:filter_suites(ssl_cipher:suites(Version))];
cipher_suites(all) ->
Version = tls_record:highest_protocol_version([]),
- Supported = ssl_cipher:all_suites(Version)
- ++ ssl_cipher:anonymous_suites(Version)
- ++ ssl_cipher:psk_suites(Version)
- ++ ssl_cipher:srp_suites(),
- ssl_cipher:filter_suites([suite_definition(S) || S <- Supported]).
-
+ ssl_cipher:filter_suites([suite_definition(S)
+ || S <-ssl_cipher:all_suites(Version)]).
cipher_suites() ->
cipher_suites(erlang).
@@ -454,7 +450,7 @@ session_info(#sslsocket{pid = {Listen,_}}) when is_port(Listen) ->
versions() ->
Vsns = tls_record:supported_protocol_versions(),
SupportedVsns = [tls_record:protocol_version(Vsn) || Vsn <- Vsns],
- AvailableVsns = ?ALL_SUPPORTED_VERSIONS,
+ AvailableVsns = ?ALL_AVAILABLE_VERSIONS,
%% TODO Add DTLS versions when supported
[{ssl_app, ?VSN}, {supported, SupportedVsns}, {available, AvailableVsns}].
@@ -656,7 +652,8 @@ handle_options(Opts0) ->
log_alert = handle_option(log_alert, Opts, true),
server_name_indication = handle_option(server_name_indication, Opts, undefined),
honor_cipher_order = handle_option(honor_cipher_order, Opts, false),
- protocol = proplists:get_value(protocol, Opts, tls)
+ protocol = proplists:get_value(protocol, Opts, tls),
+ padding_check = proplists:get_value(padding_check, Opts, true)
},
CbInfo = proplists:get_value(cb_info, Opts, {gen_tcp, tcp, tcp_closed, tcp_error}),
@@ -669,7 +666,7 @@ handle_options(Opts0) ->
cb_info, renegotiate_at, secure_renegotiate, hibernate_after,
erl_dist, next_protocols_advertised,
client_preferred_next_protocols, log_alert,
- server_name_indication, honor_cipher_order],
+ server_name_indication, honor_cipher_order, padding_check],
SockOpts = lists:foldl(fun(Key, PropList) ->
proplists:delete(Key, PropList)
@@ -847,6 +844,8 @@ validate_option(server_name_indication, undefined) ->
undefined;
validate_option(honor_cipher_order, Value) when is_boolean(Value) ->
Value;
+validate_option(padding_check, Value) when is_boolean(Value) ->
+ Value;
validate_option(Opt, Value) ->
throw({error, {options, {Opt, Value}}}).
@@ -952,10 +951,7 @@ binary_cipher_suites(Version, [{_,_,_}| _] = Ciphers0) ->
binary_cipher_suites(Version, Ciphers);
binary_cipher_suites(Version, [Cipher0 | _] = Ciphers0) when is_binary(Cipher0) ->
- All = ssl_cipher:suites(Version)
- ++ ssl_cipher:anonymous_suites(Version)
- ++ ssl_cipher:psk_suites(Version)
- ++ ssl_cipher:srp_suites(),
+ All = ssl_cipher:all_suites(Version),
case [Cipher || Cipher <- Ciphers0, lists:member(Cipher, All)] of
[] ->
%% Defaults to all supported suites that does
@@ -1182,3 +1178,4 @@ handle_verify_options(Opts, CaCerts) ->
Value ->
throw({error, {options, {verify, Value}}})
end.
+
diff --git a/lib/ssl/src/ssl_certificate.erl b/lib/ssl/src/ssl_certificate.erl
index 9c0ed181fe..30d224fee2 100644
--- a/lib/ssl/src/ssl_certificate.erl
+++ b/lib/ssl/src/ssl_certificate.erl
@@ -282,7 +282,7 @@ other_issuer(OtpCert, CertDbHandle) ->
handle_path({BinCert, OTPCert}, Path, PartialChainHandler) ->
case public_key:pkix_is_self_signed(OTPCert) of
true ->
- {BinCert, Path};
+ {BinCert, lists:delete(BinCert, Path)};
false ->
handle_incomplete_chain(Path, PartialChainHandler)
end.
diff --git a/lib/ssl/src/ssl_cipher.erl b/lib/ssl/src/ssl_cipher.erl
index e1d89c149e..5ec6e1c31b 100644
--- a/lib/ssl/src/ssl_cipher.erl
+++ b/lib/ssl/src/ssl_cipher.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -33,10 +33,10 @@
-include_lib("public_key/include/public_key.hrl").
-export([security_parameters/2, security_parameters/3, suite_definition/1,
- cipher_init/3, decipher/5, cipher/5, decipher_aead/6, cipher_aead/6,
+ cipher_init/3, decipher/6, cipher/5, decipher_aead/6, cipher_aead/6,
suite/1, suites/1, all_suites/1,
ec_keyed_suites/0, anonymous_suites/1, psk_suites/1, srp_suites/0,
- openssl_suite/1, openssl_suite_name/1, filter/2, filter_suites/1,
+ rc4_suites/1, openssl_suite/1, openssl_suite_name/1, filter/2, filter_suites/1,
hash_algorithm/1, sign_algorithm/1, is_acceptable_hash/2]).
-export_type([cipher_suite/0,
@@ -182,7 +182,8 @@ block_cipher(Fun, BlockSz, #cipher_state{key=Key, iv=IV} = CS0,
{T, CS0#cipher_state{iv=NextIV}}.
%%--------------------------------------------------------------------
--spec decipher(cipher_enum(), integer(), #cipher_state{}, binary(), ssl_record:ssl_version()) ->
+-spec decipher(cipher_enum(), integer(), #cipher_state{}, binary(),
+ ssl_record:ssl_version(), boolean()) ->
{binary(), binary(), #cipher_state{}} | #alert{}.
%%
%% Description: Decrypts the data and the MAC using cipher described
@@ -190,9 +191,9 @@ block_cipher(Fun, BlockSz, #cipher_state{key=Key, iv=IV} = CS0,
%% Used for "MAC then Cipher" suites where first the data is decrypted
%% and the an HMAC of the decrypted data is checked
%%-------------------------------------------------------------------
-decipher(?NULL, _HashSz, CipherState, Fragment, _) ->
+decipher(?NULL, _HashSz, CipherState, Fragment, _, _) ->
{Fragment, <<>>, CipherState};
-decipher(?RC4, HashSz, CipherState = #cipher_state{state = State0}, Fragment, _) ->
+decipher(?RC4, HashSz, CipherState = #cipher_state{state = State0}, Fragment, _, _) ->
try crypto:stream_decrypt(State0, Fragment) of
{State, Text} ->
GSC = generic_stream_cipher_from_bin(Text, HashSz),
@@ -208,20 +209,20 @@ decipher(?RC4, HashSz, CipherState = #cipher_state{state = State0}, Fragment, _)
?ALERT_REC(?FATAL, ?BAD_RECORD_MAC)
end;
-decipher(?DES, HashSz, CipherState, Fragment, Version) ->
+decipher(?DES, HashSz, CipherState, Fragment, Version, PaddingCheck) ->
block_decipher(fun(Key, IV, T) ->
crypto:block_decrypt(des_cbc, Key, IV, T)
- end, CipherState, HashSz, Fragment, Version);
-decipher(?'3DES', HashSz, CipherState, Fragment, Version) ->
+ end, CipherState, HashSz, Fragment, Version, PaddingCheck);
+decipher(?'3DES', HashSz, CipherState, Fragment, Version, PaddingCheck) ->
block_decipher(fun(<<K1:8/binary, K2:8/binary, K3:8/binary>>, IV, T) ->
crypto:block_decrypt(des3_cbc, [K1, K2, K3], IV, T)
- end, CipherState, HashSz, Fragment, Version);
-decipher(?AES_CBC, HashSz, CipherState, Fragment, Version) ->
+ end, CipherState, HashSz, Fragment, Version, PaddingCheck);
+decipher(?AES_CBC, HashSz, CipherState, Fragment, Version, PaddingCheck) ->
block_decipher(fun(Key, IV, T) when byte_size(Key) =:= 16 ->
crypto:block_decrypt(aes_cbc128, Key, IV, T);
(Key, IV, T) when byte_size(Key) =:= 32 ->
crypto:block_decrypt(aes_cbc256, Key, IV, T)
- end, CipherState, HashSz, Fragment, Version).
+ end, CipherState, HashSz, Fragment, Version, PaddingCheck).
%%--------------------------------------------------------------------
-spec decipher_aead(cipher_enum(), #cipher_state{}, integer(), binary(), binary(), ssl_record:ssl_version()) ->
@@ -237,7 +238,7 @@ decipher_aead(?CHACHA20_POLY1305, CipherState, SeqNo, AAD, Fragment, Version) ->
aead_decipher(chacha20_poly1305, CipherState, SeqNo, AAD, Fragment, Version).
block_decipher(Fun, #cipher_state{key=Key, iv=IV} = CipherState0,
- HashSz, Fragment, Version) ->
+ HashSz, Fragment, Version, PaddingCheck) ->
try
Text = Fun(Key, IV, Fragment),
NextIV = next_iv(Fragment, IV),
@@ -245,7 +246,7 @@ block_decipher(Fun, #cipher_state{key=Key, iv=IV} = CipherState0,
Content = GBC#generic_block_cipher.content,
Mac = GBC#generic_block_cipher.mac,
CipherState1 = CipherState0#cipher_state{iv=GBC#generic_block_cipher.next_iv},
- case is_correct_padding(GBC, Version) of
+ case is_correct_padding(GBC, Version, PaddingCheck) of
true ->
{Content, Mac, CipherState1};
false ->
@@ -306,9 +307,10 @@ suites({3, N}) ->
all_suites(Version) ->
suites(Version)
- ++ ssl_cipher:anonymous_suites(Version)
- ++ ssl_cipher:psk_suites(Version)
- ++ ssl_cipher:srp_suites().
+ ++ anonymous_suites(Version)
+ ++ psk_suites(Version)
+ ++ srp_suites()
+ ++ rc4_suites(Version).
%%--------------------------------------------------------------------
-spec anonymous_suites(ssl_record:ssl_version() | integer()) -> [cipher_suite()].
%%
@@ -394,6 +396,24 @@ srp_suites() ->
?TLS_SRP_SHA_WITH_AES_256_CBC_SHA,
?TLS_SRP_SHA_RSA_WITH_AES_256_CBC_SHA,
?TLS_SRP_SHA_DSS_WITH_AES_256_CBC_SHA].
+%%--------------------------------------------------------------------
+-spec rc4_suites(Version::ssl_record:ssl_version()) -> [cipher_suite()].
+%%
+%% Description: Returns a list of the RSA | (ECDH/RSA) | (ECDH/ECDSA)
+%% cipher suites using RC4, only supported if explicitly set by the user.
+%% RC4 is no longer considered secure. Other RC4 suites already
+%% belonged to the user-configured-only category.
+%%--------------------------------------------------------------------
+rc4_suites({3, 0}) ->
+ [?TLS_RSA_WITH_RC4_128_SHA,
+ ?TLS_RSA_WITH_RC4_128_MD5];
+rc4_suites({3, N}) when N =< 3 ->
+ [?TLS_ECDHE_ECDSA_WITH_RC4_128_SHA,
+ ?TLS_ECDHE_RSA_WITH_RC4_128_SHA,
+ ?TLS_RSA_WITH_RC4_128_SHA,
+ ?TLS_RSA_WITH_RC4_128_MD5,
+ ?TLS_ECDH_ECDSA_WITH_RC4_128_SHA,
+ ?TLS_ECDH_RSA_WITH_RC4_128_SHA].
%%--------------------------------------------------------------------
-spec suite_definition(cipher_suite()) -> int_cipher_suite().
@@ -1632,16 +1652,18 @@ generic_stream_cipher_from_bin(T, HashSz) ->
#generic_stream_cipher{content=Content,
mac=Mac}.
-%% For interoperability reasons we do not check the padding content in
-%% SSL 3.0 and TLS 1.0 as it is not strictly required and breaks
-%% interopability with for instance Google.
is_correct_padding(#generic_block_cipher{padding_length = Len,
- padding = Padding}, {3, N})
- when N == 0; N == 1 ->
- Len == byte_size(Padding);
-%% Padding must be check in TLS 1.1 and after
+ padding = Padding}, {3, 0}, _) ->
+ Len == byte_size(Padding); %% Only length check is done in SSL 3.0 spec
+%% For interoperability reasons it is possible to disable
+%% the padding check when using TLS 1.0, as it is not strictly required
+%% in the spec (only recommended); however, this makes TLS 1.0 vulnerable to the Poodle attack,
+%% so by default this clause will not match.
+is_correct_padding(GenBlockCipher, {3, 1}, false) ->
+ is_correct_padding(GenBlockCipher, {3, 0}, false);
+%% Padding must be checked in TLS 1.1 and after
is_correct_padding(#generic_block_cipher{padding_length = Len,
- padding = Padding}, _) ->
+ padding = Padding}, _, _) ->
Len == byte_size(Padding) andalso
list_to_binary(lists:duplicate(Len, Len)) == Padding.
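%% Editorial sketch (not part of the patch; host, port and the chosen suites
%% are assumptions): after this change RC4 suites are no longer offered by
%% default and must be requested explicitly through the ciphers option.
ssl:connect("legacy.example.com", 443,
            [{ciphers, [{rsa, rc4_128, sha}, {rsa, rc4_128, md5}]}]).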
diff --git a/lib/ssl/src/ssl_internal.hrl b/lib/ssl/src/ssl_internal.hrl
index 75efb64e3f..3cf6020169 100644
--- a/lib/ssl/src/ssl_internal.hrl
+++ b/lib/ssl/src/ssl_internal.hrl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -67,8 +67,11 @@
-define(TRUE, 0).
-define(FALSE, 1).
--define(ALL_SUPPORTED_VERSIONS, ['tlsv1.2', 'tlsv1.1', tlsv1, sslv3]).
--define(MIN_SUPPORTED_VERSIONS, ['tlsv1.1', tlsv1, sslv3]).
+%% sslv3 is considered insecure due to lack of padding check (Poodle attack)
+%% Keep as interop with legacy software but do not support as default
+-define(ALL_AVAILABLE_VERSIONS, ['tlsv1.2', 'tlsv1.1', tlsv1, sslv3]).
+-define(ALL_SUPPORTED_VERSIONS, ['tlsv1.2', 'tlsv1.1', tlsv1]).
+-define(MIN_SUPPORTED_VERSIONS, ['tlsv1.1', tlsv1]).
-define(ALL_DATAGRAM_SUPPORTED_VERSIONS, ['dtlsv1.2', dtlsv1]).
-define(MIN_DATAGRAM_SUPPORTED_VERSIONS, ['dtlsv1.2', dtlsv1]).
@@ -117,7 +120,8 @@
server_name_indication = undefined,
%% Should the server prefer its own cipher order over the one provided by
%% the client?
- honor_cipher_order = false
+ honor_cipher_order = false,
+ padding_check = true
}).
-record(socket_options,
diff --git a/lib/ssl/src/ssl_manager.erl b/lib/ssl/src/ssl_manager.erl
index 5553fc9220..bf0333ba8d 100644
--- a/lib/ssl/src/ssl_manager.erl
+++ b/lib/ssl/src/ssl_manager.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -30,10 +30,10 @@
lookup_trusted_cert/4,
new_session_id/1, clean_cert_db/2,
register_session/2, register_session/3, invalidate_session/2,
- invalidate_session/3, clear_pem_cache/0, manager_name/1]).
+ invalidate_session/3, invalidate_pem/1, clear_pem_cache/0, manager_name/1]).
% Spawn export
--export([init_session_validator/1]).
+-export([init_session_validator/1, init_pem_cache_validator/1]).
%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
@@ -50,7 +50,9 @@
session_lifetime,
certificate_db,
session_validation_timer,
- last_delay_timer = {undefined, undefined}%% Keep for testing purposes
+ last_delay_timer = {undefined, undefined},%% Keep for testing purposes
+ last_pem_check,
+ clear_pem_cache
}).
-define('24H_in_msec', 86400000).
@@ -118,14 +120,13 @@ connection_init(Trustedcerts, Role) ->
%% Description: Cache a pem file and return its content.
%%--------------------------------------------------------------------
cache_pem_file(File, DbHandle) ->
- MD5 = crypto:hash(md5, File),
- case ssl_pkix_db:lookup_cached_pem(DbHandle, MD5) of
+ case ssl_pkix_db:lookup_cached_pem(DbHandle, File) of
[{Content,_}] ->
{ok, Content};
[Content] ->
{ok, Content};
undefined ->
- call({cache_pem, {MD5, File}})
+ call({cache_pem, File})
end.
%%--------------------------------------------------------------------
@@ -192,6 +193,11 @@ invalidate_session(Host, Port, Session) ->
invalidate_session(Port, Session) ->
cast({invalidate_session, Port, Session}).
+
+-spec invalidate_pem(File::binary()) -> ok.
+invalidate_pem(File) ->
+ cast({invalidate_pem, File}).
+
%%====================================================================
%% gen_server callbacks
%%====================================================================
@@ -216,13 +222,17 @@ init([Name, Opts]) ->
proplists:get_value(session_cb_init_args, Opts, [])]),
Timer = erlang:send_after(SessionLifeTime * 1000 + 5000,
self(), validate_sessions),
- erlang:send_after(?CLEAR_PEM_CACHE, self(), clear_pem_cache),
+ Interval = pem_check_interval(),
+ erlang:send_after(Interval, self(), clear_pem_cache),
{ok, #state{certificate_db = CertDb,
session_cache_client = ClientSessionCache,
session_cache_server = ServerSessionCache,
session_cache_cb = CacheCb,
session_lifetime = SessionLifeTime,
- session_validation_timer = Timer}}.
+ session_validation_timer = Timer,
+ last_pem_check = os:timestamp(),
+ clear_pem_cache = Interval
+ }}.
%%--------------------------------------------------------------------
-spec handle_call(msg(), from(), #state{}) -> {reply, reply(), #state{}}.
@@ -279,7 +289,7 @@ handle_call({{new_session_id,Port}, _},
{reply, Id, State};
-handle_call({{cache_pem, File}, _Pid}, _,
+handle_call({{cache_pem,File}, _Pid}, _,
#state{certificate_db = Db} = State) ->
try ssl_pkix_db:cache_pem_file(File, Db) of
Result ->
@@ -332,7 +342,12 @@ handle_cast({invalidate_session, Host, Port,
handle_cast({invalidate_session, Port, #session{session_id = ID} = Session},
#state{session_cache_server = Cache,
session_cache_cb = CacheCb} = State) ->
- invalidate_session(Cache, CacheCb, {Port, ID}, Session, State).
+ invalidate_session(Cache, CacheCb, {Port, ID}, Session, State);
+
+handle_cast({invalidate_pem, File},
+ #state{certificate_db = [_, _, PemCache]} = State) ->
+ ssl_pkix_db:remove(File, PemCache),
+ {noreply, State}.
%%--------------------------------------------------------------------
-spec handle_info(msg(), #state{}) -> {noreply, #state{}}.
@@ -353,20 +368,19 @@ handle_info(validate_sessions, #state{session_cache_cb = CacheCb,
start_session_validator(ServerCache, CacheCb, LifeTime),
{noreply, State#state{session_validation_timer = Timer}};
+
handle_info({delayed_clean_session, Key, Cache}, #state{session_cache_cb = CacheCb
} = State) ->
CacheCb:delete(Cache, Key),
{noreply, State};
-handle_info(clear_pem_cache, #state{certificate_db = [_,_,PemChace]} = State) ->
- case ssl_pkix_db:db_size(PemChace) of
- N when N < ?NOT_TO_BIG ->
- ok;
- _ ->
- ssl_pkix_db:clear(PemChace)
- end,
- erlang:send_after(?CLEAR_PEM_CACHE, self(), clear_pem_cache),
- {noreply, State};
+handle_info(clear_pem_cache, #state{certificate_db = [_,_,PemChace],
+ clear_pem_cache = Interval,
+ last_pem_check = CheckPoint} = State) ->
+ NewCheckPoint = os:timestamp(),
+ start_pem_cache_validator(PemChace, CheckPoint),
+ erlang:send_after(Interval, self(), clear_pem_cache),
+ {noreply, State#state{last_pem_check = NewCheckPoint}};
handle_info({clean_cert_db, Ref, File},
@@ -514,10 +528,9 @@ new_id(Port, Tries, Cache, CacheCb) ->
clean_cert_db(Ref, CertDb, RefDb, PemCache, File) ->
case ssl_pkix_db:ref_count(Ref, RefDb, 0) of
0 ->
- MD5 = crypto:hash(md5, File),
- case ssl_pkix_db:lookup_cached_pem(PemCache, MD5) of
+ case ssl_pkix_db:lookup_cached_pem(PemCache, File) of
[{Content, Ref}] ->
- ssl_pkix_db:insert(MD5, Content, PemCache);
+ ssl_pkix_db:insert(File, Content, PemCache);
_ ->
ok
end,
@@ -557,3 +570,39 @@ exists_equivalent(#session{
true;
exists_equivalent(Session, [ _ | Rest]) ->
exists_equivalent(Session, Rest).
+
+start_pem_cache_validator(PemCache, CheckPoint) ->
+ spawn_link(?MODULE, init_pem_cache_validator,
+ [[get(ssl_manager), PemCache, CheckPoint]]).
+
+init_pem_cache_validator([SslManagerName, PemCache, CheckPoint]) ->
+ put(ssl_manager, SslManagerName),
+ ssl_pkix_db:foldl(fun pem_cache_validate/2,
+ CheckPoint, PemCache).
+
+pem_cache_validate({File, _}, CheckPoint) ->
+ case file:read_file_info(File, []) of
+ {ok, #file_info{mtime = Time}} ->
+ case is_before_checkpoint(Time, CheckPoint) of
+ true ->
+ ok;
+ false ->
+ invalidate_pem(File)
+ end;
+ _ ->
+ invalidate_pem(File)
+ end,
+ CheckPoint.
+
+pem_check_interval() ->
+ case application:get_env(ssl, ssl_pem_cache_clean) of
+ {ok, Interval} when is_integer(Interval) ->
+ Interval;
+ _ ->
+ ?CLEAR_PEM_CACHE
+ end.
+
+is_before_checkpoint(Time, CheckPoint) ->
+ calendar:datetime_to_gregorian_seconds(calendar:now_to_datetime(CheckPoint)) -
+ calendar:datetime_to_gregorian_seconds(Time) > 0.
+
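
The ssl_manager changes above make the PEM cache self-invalidating: init/1 now schedules clear_pem_cache via pem_check_interval/0, which honours the ssl_pem_cache_clean application environment key, and the validator spawned from handle_info/2 re-checks each cached file's mtime against the last checkpoint. A minimal sketch, not part of the patch, of tuning that interval before ssl starts (the 10000 ms value is illustrative only):

    %% Sketch: shorten the PEM cache cleanup interval so modified
    %% certificate files are re-read sooner. The value must be set before
    %% the ssl application starts, because ssl_manager:init/1 reads it
    %% once via pem_check_interval/0.
    -module(pem_cache_interval_example).
    -export([start_ssl/0]).

    start_ssl() ->
        application:load(ssl),        %% ok | {error,{already_loaded,ssl}}
        ok = application:set_env(ssl, ssl_pem_cache_clean, 10000),
        ssl:start().
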
diff --git a/lib/ssl/src/ssl_pkix_db.erl b/lib/ssl/src/ssl_pkix_db.erl
index e59aba0618..8531445ba4 100644
--- a/lib/ssl/src/ssl_pkix_db.erl
+++ b/lib/ssl/src/ssl_pkix_db.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -81,10 +81,10 @@ lookup_trusted_cert(DbHandle, Ref, SerialNumber, Issuer) ->
{ok, Certs}
end.
-lookup_cached_pem([_, _, PemChache], MD5) ->
- lookup_cached_pem(PemChache, MD5);
-lookup_cached_pem(PemChache, MD5) ->
- lookup(MD5, PemChache).
+lookup_cached_pem([_, _, PemChache], File) ->
+ lookup_cached_pem(PemChache, File);
+lookup_cached_pem(PemChache, File) ->
+ lookup(File, PemChache).
%%--------------------------------------------------------------------
-spec add_trusted_certs(pid(), {erlang:timestamp(), string()} |
@@ -100,36 +100,35 @@ add_trusted_certs(_Pid, {der, DerList}, [CerDb, _,_]) ->
{ok, NewRef};
add_trusted_certs(_Pid, File, [CertsDb, RefDb, PemChache] = Db) ->
- MD5 = crypto:hash(md5, File),
- case lookup_cached_pem(Db, MD5) of
+ case lookup_cached_pem(Db, File) of
[{_Content, Ref}] ->
ref_count(Ref, RefDb, 1),
{ok, Ref};
[Content] ->
Ref = make_ref(),
update_counter(Ref, 1, RefDb),
- insert(MD5, {Content, Ref}, PemChache),
+ insert(File, {Content, Ref}, PemChache),
add_certs_from_pem(Content, Ref, CertsDb),
{ok, Ref};
undefined ->
- new_trusted_cert_entry({MD5, File}, Db)
+ new_trusted_cert_entry(File, Db)
end.
%%--------------------------------------------------------------------
%%
%% Description: Cache file as binary in DB
%%--------------------------------------------------------------------
--spec cache_pem_file({binary(), binary()}, [db_handle()]) -> {ok, term()}.
-cache_pem_file({MD5, File}, [_CertsDb, _RefDb, PemChache]) ->
+-spec cache_pem_file(binary(), [db_handle()]) -> {ok, term()}.
+cache_pem_file(File, [_CertsDb, _RefDb, PemChache]) ->
{ok, PemBin} = file:read_file(File),
Content = public_key:pem_decode(PemBin),
- insert(MD5, Content, PemChache),
+ insert(File, Content, PemChache),
{ok, Content}.
--spec cache_pem_file(reference(), {binary(), binary()}, [db_handle()]) -> {ok, term()}.
-cache_pem_file(Ref, {MD5, File}, [_CertsDb, _RefDb, PemChache]) ->
+-spec cache_pem_file(reference(), binary(), [db_handle()]) -> {ok, term()}.
+cache_pem_file(Ref, File, [_CertsDb, _RefDb, PemChache]) ->
{ok, PemBin} = file:read_file(File),
Content = public_key:pem_decode(PemBin),
- insert(MD5, {Content, Ref}, PemChache),
+ insert(File, {Content, Ref}, PemChache),
{ok, Content}.
%%--------------------------------------------------------------------
@@ -245,9 +244,9 @@ add_certs(Cert, Ref, CertsDb) ->
error_logger:info_report(Report)
end.
-new_trusted_cert_entry(FileRef, [CertsDb, RefDb, _] = Db) ->
+new_trusted_cert_entry(File, [CertsDb, RefDb, _] = Db) ->
Ref = make_ref(),
update_counter(Ref, 1, RefDb),
- {ok, Content} = cache_pem_file(Ref, FileRef, Db),
+ {ok, Content} = cache_pem_file(Ref, File, Db),
add_certs_from_pem(Content, Ref, CertsDb),
{ok, Ref}.
diff --git a/lib/ssl/src/ssl_record.erl b/lib/ssl/src/ssl_record.erl
index 63fc57edad..a02375a947 100644
--- a/lib/ssl/src/ssl_record.erl
+++ b/lib/ssl/src/ssl_record.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2013-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2013-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -48,7 +48,7 @@
-export([compress/3, uncompress/3, compressions/0]).
%% Payload encryption/decryption
--export([cipher/4, decipher/3, is_correct_mac/2,
+-export([cipher/4, decipher/4, is_correct_mac/2,
cipher_aead/4, decipher_aead/4]).
-export_type([ssl_version/0, ssl_atom_version/0]).
@@ -396,7 +396,7 @@ cipher_aead(Version, Fragment,
{CipherFragment, WriteState0#connection_state{cipher_state = CipherS1}}.
%%--------------------------------------------------------------------
--spec decipher(ssl_version(), binary(), #connection_state{}) -> {binary(), binary(), #connection_state{}} | #alert{}.
+-spec decipher(ssl_version(), binary(), #connection_state{}, boolean()) -> {binary(), binary(), #connection_state{}} | #alert{}.
%%
%% Description: Payload decryption
%%--------------------------------------------------------------------
@@ -406,8 +406,8 @@ decipher(Version, CipherFragment,
BulkCipherAlgo,
hash_size = HashSz},
cipher_state = CipherS0
- } = ReadState) ->
- case ssl_cipher:decipher(BulkCipherAlgo, HashSz, CipherS0, CipherFragment, Version) of
+ } = ReadState, PaddingCheck) ->
+ case ssl_cipher:decipher(BulkCipherAlgo, HashSz, CipherS0, CipherFragment, Version, PaddingCheck) of
{PlainFragment, Mac, CipherS1} ->
CS1 = ReadState#connection_state{cipher_state = CipherS1},
{PlainFragment, Mac, CS1};
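
decipher/4 above threads the new PaddingCheck flag down to ssl_cipher:decipher/6; together with the padding_check field added to #ssl_options{} earlier in this patch it surfaces as a socket option. A hedged sketch of a client that turns the check off for a legacy TLS 1.0 peer, with placeholder Host and Port; the default introduced here remains padding_check = true:

    %% Sketch: disabling the CBC padding check is an interoperability
    %% trade-off and only affects block ciphers on TLS 1.0 and SSLv3.
    -module(padding_check_example).
    -export([connect/2]).

    connect(Host, Port) ->
        ssl:start(),
        ssl:connect(Host, Port,
                    [{versions, [tlsv1]},
                     {padding_check, false},
                     {verify, verify_none}],
                    5000).
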
diff --git a/lib/ssl/src/ssl_v3.erl b/lib/ssl/src/ssl_v3.erl
index 68f7f5dee2..169b39be32 100644
--- a/lib/ssl/src/ssl_v3.erl
+++ b/lib/ssl/src/ssl_v3.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -143,9 +143,6 @@ suites() ->
?TLS_DHE_RSA_WITH_AES_128_CBC_SHA,
?TLS_DHE_DSS_WITH_AES_128_CBC_SHA,
?TLS_RSA_WITH_AES_128_CBC_SHA,
- %%?TLS_RSA_WITH_IDEA_CBC_SHA,
- ?TLS_RSA_WITH_RC4_128_SHA,
- ?TLS_RSA_WITH_RC4_128_MD5,
?TLS_RSA_WITH_DES_CBC_SHA
].
diff --git a/lib/ssl/src/tls_connection.erl b/lib/ssl/src/tls_connection.erl
index 7df73fb581..77d3aa7889 100644
--- a/lib/ssl/src/tls_connection.erl
+++ b/lib/ssl/src/tls_connection.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -482,8 +482,9 @@ next_record(#state{protocol_buffers = #protocol_buffers{tls_packets = [], tls_ci
next_record(#state{protocol_buffers =
#protocol_buffers{tls_packets = [], tls_cipher_texts = [CT | Rest]}
= Buffers,
- connection_states = ConnStates0} = State) ->
- case tls_record:decode_cipher_text(CT, ConnStates0) of
+ connection_states = ConnStates0,
+ ssl_options = #ssl_options{padding_check = Check}} = State) ->
+ case tls_record:decode_cipher_text(CT, ConnStates0, Check) of
{Plain, ConnStates} ->
{Plain, State#state{protocol_buffers =
Buffers#protocol_buffers{tls_cipher_texts = Rest},
diff --git a/lib/ssl/src/tls_record.erl b/lib/ssl/src/tls_record.erl
index 544d200f70..3d5c5c0da3 100644
--- a/lib/ssl/src/tls_record.erl
+++ b/lib/ssl/src/tls_record.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -34,7 +34,7 @@
-export([get_tls_records/2]).
%% Decoding
--export([decode_cipher_text/2]).
+-export([decode_cipher_text/3]).
%% Encoding
-export([encode_plain_text/4]).
@@ -159,7 +159,7 @@ encode_plain_text(Type, Version, Data,
{CipherText, ConnectionStates#connection_states{current_write = WriteState#connection_state{sequence_number = Seq +1}}}.
%%--------------------------------------------------------------------
--spec decode_cipher_text(#ssl_tls{}, #connection_states{}) ->
+-spec decode_cipher_text(#ssl_tls{}, #connection_states{}, boolean()) ->
{#ssl_tls{}, #connection_states{}}| #alert{}.
%%
%% Description: Decode cipher text
@@ -174,7 +174,7 @@ decode_cipher_text(#ssl_tls{type = Type, version = Version,
#security_parameters{
cipher_type = ?AEAD,
compression_algorithm=CompAlg}
- } = ReadState0} = ConnnectionStates0) ->
+ } = ReadState0} = ConnnectionStates0, _) ->
AAD = calc_aad(Type, Version, ReadState0),
case ssl_record:decipher_aead(Version, CipherFragment, ReadState0, AAD) of
{PlainFragment, ReadState1} ->
@@ -197,8 +197,8 @@ decode_cipher_text(#ssl_tls{type = Type, version = Version,
sequence_number = Seq,
security_parameters=
#security_parameters{compression_algorithm=CompAlg}
- } = ReadState0} = ConnnectionStates0) ->
- case ssl_record:decipher(Version, CipherFragment, ReadState0) of
+ } = ReadState0} = ConnnectionStates0, PaddingCheck) ->
+ case ssl_record:decipher(Version, CipherFragment, ReadState0, PaddingCheck) of
{PlainFragment, Mac, ReadState1} ->
MacHash = calc_mac_hash(Type, Version, PlainFragment, ReadState1),
case ssl_record:is_correct_mac(Mac, MacHash) of
@@ -311,8 +311,17 @@ supported_protocol_versions([]) ->
Vsns;
supported_protocol_versions([_|_] = Vsns) ->
- Vsns.
-
+ case sufficient_tlsv1_2_crypto_support() of
+ true ->
+ Vsns;
+ false ->
+ case Vsns -- ['tlsv1.2'] of
+ [] ->
+ ?MIN_SUPPORTED_VERSIONS;
+ NewVsns ->
+ NewVsns
+ end
+ end.
%%--------------------------------------------------------------------
%%
 %% Description: SSL version 2 is not acceptable; the security risks are too big.

diff --git a/lib/ssl/src/tls_v1.erl b/lib/ssl/src/tls_v1.erl
index c4114278a4..559fc1d6a8 100644
--- a/lib/ssl/src/tls_v1.erl
+++ b/lib/ssl/src/tls_v1.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -208,15 +208,7 @@ suites(Minor) when Minor == 1; Minor == 2 ->
?TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA,
?TLS_ECDH_RSA_WITH_AES_128_CBC_SHA,
?TLS_RSA_WITH_AES_128_CBC_SHA,
-
- ?TLS_ECDHE_ECDSA_WITH_RC4_128_SHA,
- ?TLS_ECDHE_RSA_WITH_RC4_128_SHA,
- ?TLS_RSA_WITH_RC4_128_SHA,
- ?TLS_RSA_WITH_RC4_128_MD5,
?TLS_DHE_RSA_WITH_DES_CBC_SHA,
- ?TLS_ECDH_ECDSA_WITH_RC4_128_SHA,
- ?TLS_ECDH_RSA_WITH_RC4_128_SHA,
-
?TLS_RSA_WITH_DES_CBC_SHA
];
suites(3) ->
diff --git a/lib/ssl/test/Makefile b/lib/ssl/test/Makefile
index 3639c2b2da..09cc5981e7 100644
--- a/lib/ssl/test/Makefile
+++ b/lib/ssl/test/Makefile
@@ -1,7 +1,7 @@
#
# %CopyrightBegin%
#
-# Copyright Ericsson AB 1999-2013. All Rights Reserved.
+# Copyright Ericsson AB 1999-2015. All Rights Reserved.
#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
@@ -47,9 +47,11 @@ MODULES = \
ssl_npn_handshake_SUITE \
ssl_packet_SUITE \
ssl_payload_SUITE \
+ ssl_pem_cache_SUITE \
ssl_session_cache_SUITE \
ssl_to_openssl_SUITE \
ssl_ECC_SUITE \
+ ssl_upgrade_SUITE\
make_certs\
erl_make_certs
diff --git a/lib/ssl/test/ssl_basic_SUITE.erl b/lib/ssl/test/ssl_basic_SUITE.erl
index dc9e8934e6..77ef8088b4 100644
--- a/lib/ssl/test/ssl_basic_SUITE.erl
+++ b/lib/ssl/test/ssl_basic_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -65,7 +65,7 @@ groups() ->
{'tlsv1.2', [], all_versions_groups()},
{'tlsv1.1', [], all_versions_groups()},
{'tlsv1', [], all_versions_groups() ++ rizzo_tests()},
- {'sslv3', [], all_versions_groups() ++ rizzo_tests()},
+ {'sslv3', [], all_versions_groups() ++ rizzo_tests() ++ [ciphersuite_vs_version]},
{api,[], api_tests()},
{session, [], session_tests()},
{renegotiate, [], renegotiate_tests()},
@@ -90,7 +90,8 @@ basic_tests() ->
version_option,
connect_twice,
connect_dist,
- clear_pem_cache
+ clear_pem_cache,
+ defaults
].
options_tests() ->
@@ -116,7 +117,6 @@ options_tests() ->
tcp_reuseaddr,
honor_server_cipher_order,
honor_client_cipher_order,
- ciphersuite_vs_version,
unordered_protocol_versions_server,
unordered_protocol_versions_client
].
@@ -177,6 +177,9 @@ cipher_tests() ->
srp_cipher_suites,
srp_anon_cipher_suites,
srp_dsa_cipher_suites,
+ rc4_rsa_cipher_suites,
+ rc4_ecdh_rsa_cipher_suites,
+ rc4_ecdsa_cipher_suites,
default_reject_anonymous].
cipher_tests_ec() ->
@@ -256,11 +259,6 @@ init_per_testcase(Case, Config) when Case == unordered_protocol_versions_client
_ ->
 	    {skip, "TLS 1.2 needed but not supported on this platform"}
end;
-init_per_testcase(no_authority_key_identifier, Config) ->
- %% Clear cach so that root cert will not
- %% be found.
- ssl:clear_pem_cache(),
- Config;
init_per_testcase(protocol_versions, Config) ->
ssl:stop(),
@@ -343,7 +341,7 @@ alerts(Config) when is_list(Config) ->
end, Alerts).
%%--------------------------------------------------------------------
new_options_in_accept() ->
- [{doc,"Test that you can set ssl options in ssl_accept/3 and not tcp upgrade"}].
+ [{doc,"Test that you can set ssl options in ssl_accept/3 and not only in tcp upgrade"}].
new_options_in_accept(Config) when is_list(Config) ->
ClientOpts = ?config(client_opts, Config),
ServerOpts0 = ?config(server_dsa_opts, Config),
@@ -361,7 +359,9 @@ new_options_in_accept(Config) when is_list(Config) ->
{host, Hostname},
{from, self()},
{mfa, {?MODULE, connection_info_result, []}},
- {options, [{versions, [sslv3]} | ClientOpts]}]),
+ {options, [{versions, [sslv3]},
+ {ciphers,[{rsa,rc4_128,sha}
+ ]} | ClientOpts]}]),
ct:log("Testcase ~p, Client ~p Server ~p ~n",
[self(), Client, Server]),
@@ -391,7 +391,7 @@ connection_info(Config) when is_list(Config) ->
{from, self()},
{mfa, {?MODULE, connection_info_result, []}},
{options,
- [{ciphers,[{rsa,rc4_128,sha,no_export}]} |
+ [{ciphers,[{rsa,des_cbc,sha,no_export}]} |
ClientOpts]}]),
ct:log("Testcase ~p, Client ~p Server ~p ~n",
@@ -400,7 +400,7 @@ connection_info(Config) when is_list(Config) ->
Version =
tls_record:protocol_version(tls_record:highest_protocol_version([])),
- ServerMsg = ClientMsg = {ok, {Version, {rsa,rc4_128,sha}}},
+ ServerMsg = ClientMsg = {ok, {Version, {rsa, des_cbc, sha}}},
ssl_test_lib:check_result(Server, ServerMsg, Client, ClientMsg),
@@ -1779,6 +1779,32 @@ srp_dsa_cipher_suites(Config) when is_list(Config) ->
Version = tls_record:protocol_version(tls_record:highest_protocol_version([])),
Ciphers = ssl_test_lib:srp_dss_suites(),
run_suites(Ciphers, Version, Config, srp_dsa).
+%%-------------------------------------------------------------------
+rc4_rsa_cipher_suites()->
+    [{doc, "Test the RC4 ciphersuites with RSA key exchange"}].
+rc4_rsa_cipher_suites(Config) when is_list(Config) ->
+ NVersion = tls_record:highest_protocol_version([]),
+ Version = tls_record:protocol_version(NVersion),
+ Ciphers = ssl_test_lib:rc4_suites(NVersion),
+ run_suites(Ciphers, Version, Config, rc4_rsa).
+%%-------------------------------------------------------------------
+rc4_ecdh_rsa_cipher_suites()->
+    [{doc, "Test the RC4 ciphersuites with ECDH-RSA key exchange"}].
+rc4_ecdh_rsa_cipher_suites(Config) when is_list(Config) ->
+ NVersion = tls_record:highest_protocol_version([]),
+ Version = tls_record:protocol_version(NVersion),
+ Ciphers = ssl_test_lib:rc4_suites(NVersion),
+ run_suites(Ciphers, Version, Config, rc4_ecdh_rsa).
+
+%%-------------------------------------------------------------------
+rc4_ecdsa_cipher_suites()->
+    [{doc, "Test the RC4 ciphersuites with ECDSA certificates"}].
+rc4_ecdsa_cipher_suites(Config) when is_list(Config) ->
+ NVersion = tls_record:highest_protocol_version([]),
+ Version = tls_record:protocol_version(NVersion),
+ Ciphers = ssl_test_lib:rc4_suites(NVersion),
+ run_suites(Ciphers, Version, Config, rc4_ecdsa).
+
%%--------------------------------------------------------------------
default_reject_anonymous()->
[{doc,"Test that by default anonymous cipher suites are rejected "}].
@@ -2507,6 +2533,16 @@ no_reuses_session_server_restart_new_cert_file(Config) when is_list(Config) ->
ssl_test_lib:close(Client1).
%%--------------------------------------------------------------------
+defaults(Config) when is_list(Config)->
+ [_,
+ {supported, Supported},
+ {available, Available}]
+ = ssl:versions(),
+ true = lists:member(sslv3, Available),
+ false = lists:member(sslv3, Supported),
+ false = lists:member({rsa,rc4_128,sha}, ssl:cipher_suites()),
+ true = lists:member({rsa,rc4_128,sha}, ssl:cipher_suites(all)).
+%%--------------------------------------------------------------------
reuseaddr() ->
[{doc,"Test reuseaddr option"}].
@@ -2631,6 +2667,8 @@ honor_cipher_order(Config, Honor, ServerCiphers, ClientCiphers, Expected) ->
ssl_test_lib:close(Client).
%%--------------------------------------------------------------------
+ciphersuite_vs_version() ->
+    [{doc,"Test that an SSLv3 client cannot negotiate a TLSv* cipher suite."}].
ciphersuite_vs_version(Config) when is_list(Config) ->
{_ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
@@ -3694,8 +3732,20 @@ run_suites(Ciphers, Version, Config, Type) ->
?config(server_ecdsa_opts, Config)};
ecdh_rsa ->
{?config(client_opts, Config),
- ?config(server_ecdh_rsa_opts, Config)}
- end,
+ ?config(server_ecdh_rsa_opts, Config)};
+ rc4_rsa ->
+ {?config(client_opts, Config),
+ [{ciphers, Ciphers} |
+ ?config(server_opts, Config)]};
+ rc4_ecdh_rsa ->
+ {?config(client_opts, Config),
+ [{ciphers, Ciphers} |
+ ?config(server_ecdh_rsa_opts, Config)]};
+ rc4_ecdsa ->
+ {?config(client_opts, Config),
+ [{ciphers, Ciphers} |
+ ?config(server_ecdsa_opts, Config)]}
+ end,
Result = lists:map(fun(Cipher) ->
cipher(Cipher, Version, Config, ClientOpts, ServerOpts) end,
@@ -3716,6 +3766,7 @@ erlang_cipher_suite(Suite) ->
cipher(CipherSuite, Version, Config, ClientOpts, ServerOpts) ->
%% process_flag(trap_exit, true),
ct:log("Testing CipherSuite ~p~n", [CipherSuite]),
+ ct:log("Server Opts ~p~n", [ServerOpts]),
{ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
ErlangCipherSuite = erlang_cipher_suite(CipherSuite),
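
The suite changes above (rc4_rsa_cipher_suites and friends, plus the explicit {ciphers,[{rsa,rc4_128,sha}]} in new_options_in_accept) reflect that RC4 suites no longer appear in the defaults returned by ssl_v3:suites/0 and tls_v1:suites/1. A minimal sketch, with placeholder Host and Port, of the opt-in an application that still has to speak RC4 would need after this change; this is an illustration, not a recommendation:

    %% Sketch: RC4 must now be requested explicitly; it is still listed
    %% by ssl:cipher_suites(all) but not by ssl:cipher_suites().
    -module(rc4_opt_in_example).
    -export([connect_rc4/2]).

    connect_rc4(Host, Port) ->
        ssl:start(),
        ssl:connect(Host, Port,
                    [{ciphers, [{rsa, rc4_128, sha}]},
                     {verify, verify_none}],
                    5000).
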
diff --git a/lib/ssl/test/ssl_certificate_verify_SUITE.erl b/lib/ssl/test/ssl_certificate_verify_SUITE.erl
index b7864ba6e7..dab7a941db 100644
--- a/lib/ssl/test/ssl_certificate_verify_SUITE.erl
+++ b/lib/ssl/test/ssl_certificate_verify_SUITE.erl
@@ -443,7 +443,7 @@ verify_fun_always_run_client(Config) when is_list(Config) ->
{unknown, UserState};
(_, valid, [ChainLen]) ->
{valid, [ChainLen + 1]};
- (_, valid_peer, [2]) ->
+ (_, valid_peer, [1]) ->
{fail, "verify_fun_was_always_run"};
(_, valid_peer, UserState) ->
{valid, UserState}
@@ -482,7 +482,7 @@ verify_fun_always_run_server(Config) when is_list(Config) ->
{unknown, UserState};
(_, valid, [ChainLen]) ->
{valid, [ChainLen + 1]};
- (_, valid_peer, [2]) ->
+ (_, valid_peer, [1]) ->
{fail, "verify_fun_was_always_run"};
(_, valid_peer, UserState) ->
{valid, UserState}
diff --git a/lib/ssl/test/ssl_cipher_SUITE.erl b/lib/ssl/test/ssl_cipher_SUITE.erl
index f2dc1b52c1..3433f9a445 100644
--- a/lib/ssl/test/ssl_cipher_SUITE.erl
+++ b/lib/ssl/test/ssl_cipher_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -38,7 +38,7 @@
suite() -> [{ct_hooks,[ts_install_cth]}].
all() ->
- [aes_decipher_good, aes_decipher_good_tls11, aes_decipher_fail, aes_decipher_fail_tls11].
+ [aes_decipher_good, aes_decipher_fail, padding_test].
groups() ->
[].
@@ -73,93 +73,122 @@ end_per_testcase(_TestCase, Config) ->
%% Test Cases --------------------------------------------------------
%%--------------------------------------------------------------------
aes_decipher_good() ->
- [{doc,"Decipher a known cryptotext."}].
+ [{doc,"Decipher a known cryptotext using a correct key"}].
aes_decipher_good(Config) when is_list(Config) ->
HashSz = 32,
- CipherState = #cipher_state{iv = <<59,201,85,117,188,206,224,136,5,109,46,70,104,79,4,9>>,
- key = <<72,196,247,97,62,213,222,109,210,204,217,186,172,184,197,148>>},
- Fragment = <<220,193,179,139,171,33,143,245,202,47,123,251,13,232,114,8,
- 190,162,74,31,186,227,119,155,94,74,119,79,169,193,240,160,
- 198,181,81,19,98,162,213,228,74,224,253,168,156,59,195,122,
- 108,101,107,242,20,15,169,150,163,107,101,94,93,104,241,165>>,
- Content = <<183,139,16,132,10,209,67,86,168,100,61,217,145,57,36,56, "HELLO\n">>,
- Mac = <<71,136,212,107,223,200,70,232,127,116,148,205,232,35,158,113,237,174,15,217,192,168,35,8,6,107,107,233,25,174,90,111>>,
- Version = {3,0},
- {Content, Mac, _} = ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, Fragment, Version),
- Version1 = {3,1},
- {Content, Mac, _} = ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, Fragment, Version1),
- ok.
+ CipherState = correct_cipher_state(),
+ decipher_check_good(HashSz, CipherState, {3,0}),
+ decipher_check_good(HashSz, CipherState, {3,1}),
+ decipher_check_good(HashSz, CipherState, {3,2}),
+ decipher_check_good(HashSz, CipherState, {3,3}).
%%--------------------------------------------------------------------
-
-aes_decipher_good_tls11() ->
- [{doc,"Decipher a known TLS 1.1 cryptotext."}].
-
-%% the fragment is actuall a TLS 1.1 record, with
-%% Version = TLS 1.1, we get the correct NextIV in #cipher_state
-aes_decipher_good_tls11(Config) when is_list(Config) ->
- HashSz = 32,
- CipherState = #cipher_state{iv = <<59,201,85,117,188,206,224,136,5,109,46,70,104,79,4,9>>,
- key = <<72,196,247,97,62,213,222,109,210,204,217,186,172,184,197,148>>},
- Fragment = <<220,193,179,139,171,33,143,245,202,47,123,251,13,232,114,8,
- 190,162,74,31,186,227,119,155,94,74,119,79,169,193,240,160,
- 198,181,81,19,98,162,213,228,74,224,253,168,156,59,195,122,
- 108,101,107,242,20,15,169,150,163,107,101,94,93,104,241,165>>,
- Content = <<"HELLO\n">>,
- NextIV = <<183,139,16,132,10,209,67,86,168,100,61,217,145,57,36,56>>,
- Mac = <<71,136,212,107,223,200,70,232,127,116,148,205,232,35,158,113,237,174,15,217,192,168,35,8,6,107,107,233,25,174,90,111>>,
- Version = {3,2},
- {Content, Mac, #cipher_state{iv = NextIV}} = ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, Fragment, Version),
- Version1 = {3,2},
- {Content, Mac, #cipher_state{iv = NextIV}} = ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, Fragment, Version1),
- ok.
-
-%%--------------------------------------------------------------------
-
aes_decipher_fail() ->
- [{doc,"Decipher a known cryptotext."}].
+    [{doc,"Decipher a known cryptotext using an incorrect key."}].
-%% same as above, last byte of key replaced
aes_decipher_fail(Config) when is_list(Config) ->
HashSz = 32,
- CipherState = #cipher_state{iv = <<59,201,85,117,188,206,224,136,5,109,46,70,104,79,4,9>>,
- key = <<72,196,247,97,62,213,222,109,210,204,217,186,172,184,197,254>>},
- Fragment = <<220,193,179,139,171,33,143,245,202,47,123,251,13,232,114,8,
- 190,162,74,31,186,227,119,155,94,74,119,79,169,193,240,160,
- 198,181,81,19,98,162,213,228,74,224,253,168,156,59,195,122,
- 108,101,107,242,20,15,169,150,163,107,101,94,93,104,241,165>>,
- Version = {3,0},
- {Content, Mac, _} = ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, Fragment, Version),
- 32 = byte_size(Content),
- 32 = byte_size(Mac),
- Version1 = {3,1},
- {Content1, Mac1, _} = ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, Fragment, Version1),
- 32 = byte_size(Content1),
- 32 = byte_size(Mac1),
- ok.
+ CipherState = incorrect_cipher_state(),
+ decipher_check_fail(HashSz, CipherState, {3,0}),
+ decipher_check_fail(HashSz, CipherState, {3,1}),
+ decipher_check_fail(HashSz, CipherState, {3,2}),
+ decipher_check_fail(HashSz, CipherState, {3,3}).
%%--------------------------------------------------------------------
-
-aes_decipher_fail_tls11() ->
- [{doc,"Decipher a known TLS 1.1 cryptotext."}].
-
-%% same as above, last byte of key replaced
-%% stricter padding checks in TLS 1.1 mean we get an alert instead
-aes_decipher_fail_tls11(Config) when is_list(Config) ->
- HashSz = 32,
- CipherState = #cipher_state{iv = <<59,201,85,117,188,206,224,136,5,109,46,70,104,79,4,9>>,
- key = <<72,196,247,97,62,213,222,109,210,204,217,186,172,184,197,254>>},
- Fragment = <<220,193,179,139,171,33,143,245,202,47,123,251,13,232,114,8,
- 190,162,74,31,186,227,119,155,94,74,119,79,169,193,240,160,
- 198,181,81,19,98,162,213,228,74,224,253,168,156,59,195,122,
- 108,101,107,242,20,15,169,150,163,107,101,94,93,104,241,165>>,
- Version = {3,2},
- #alert{level = ?FATAL, description = ?BAD_RECORD_MAC} =
- ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, Fragment, Version),
- Version1 = {3,3},
- #alert{level = ?FATAL, description = ?BAD_RECORD_MAC} =
- ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, Fragment, Version1),
- ok.
-
+padding_test(Config) when is_list(Config) ->
+ HashSz = 16,
+ CipherState = correct_cipher_state(),
+ pad_test(HashSz, CipherState, {3,0}),
+ pad_test(HashSz, CipherState, {3,1}),
+ pad_test(HashSz, CipherState, {3,2}),
+ pad_test(HashSz, CipherState, {3,3}).
+
+%%--------------------------------------------------------------------
+%% Internal functions -------------------------------------------------------
%%--------------------------------------------------------------------
+decipher_check_good(HashSz, CipherState, Version) ->
+ {Content, NextIV, Mac} = content_nextiv_mac(Version),
+ {Content, Mac, _} =
+ ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, aes_fragment(Version), Version, true).
+
+decipher_check_fail(HashSz, CipherState, Version) ->
+ {Content, NextIV, Mac} = content_nextiv_mac(Version),
+ true = {Content, Mac, #cipher_state{iv = NextIV}} =/=
+ ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, aes_fragment(Version), Version, true).
+
+pad_test(HashSz, CipherState, {3,0} = Version) ->
+    %% 3.0 has no padding check
+ {Content, NextIV, Mac} = badpad_content_nextiv_mac(Version),
+ {Content, Mac, #cipher_state{iv = NextIV}} =
+ ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, badpad_aes_fragment({3,0}), {3,0}, true),
+ {Content, Mac, #cipher_state{iv = NextIV}} =
+ ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, badpad_aes_fragment({3,0}), {3,0}, false);
+pad_test(HashSz, CipherState, {3,1} = Version) ->
+    %% 3.1 should check padding, but the check may be disabled
+ {Content, NextIV, Mac} = badpad_content_nextiv_mac(Version),
+ BadCont = badpad_content(Content),
+ {Content, Mac, #cipher_state{iv = NextIV}} =
+ ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, badpad_aes_fragment({3,1}) , {3,1}, false),
+ {BadCont, Mac, #cipher_state{iv = NextIV}} =
+ ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, badpad_aes_fragment({3,1}), {3,1}, true);
+pad_test(HashSz, CipherState, Version) ->
+    %% 3.2 and 3.3 must always check padding
+ {Content, NextIV, Mac} = badpad_content_nextiv_mac(Version),
+ BadCont = badpad_content(Content),
+ {BadCont, Mac, #cipher_state{iv = NextIV}} = ssl_cipher:decipher(?AES_CBC, HashSz, CipherState,
+ badpad_aes_fragment(Version), Version, false),
+ {BadCont, Mac, #cipher_state{iv = NextIV}} = ssl_cipher:decipher(?AES_CBC, HashSz, CipherState,
+ badpad_aes_fragment(Version), Version, true).
+
+aes_fragment({3,N}) when N == 0; N == 1->
+ <<197,9,6,109,242,87,80,154,85,250,110,81,119,95,65,185,53,206,216,153,246,169,
+ 119,177,178,238,248,174,253,220,242,81,33,0,177,251,91,44,247,53,183,198,165,
+ 63,20,194,159,107>>;
+
+aes_fragment(_) ->
+ <<220,193,179,139,171,33,143,245,202,47,123,251,13,232,114,8,
+ 190,162,74,31,186,227,119,155,94,74,119,79,169,193,240,160,
+ 198,181,81,19,98,162,213,228,74,224,253,168,156,59,195,122,
+ 108,101,107,242,20,15,169,150,163,107,101,94,93,104,241,165>>.
+
+badpad_aes_fragment({3,N}) when N == 0; N == 1 ->
+ <<186,139,125,10,118,21,26,248,120,108,193,104,87,118,145,79,225,55,228,10,105,
+ 30,190,37,1,88,139,243,210,99,65,41>>;
+badpad_aes_fragment(_) ->
+ <<137,31,14,77,228,80,76,103,183,125,55,250,68,190,123,131,117,23,229,180,207,
+ 94,121,137,117,157,109,99,113,61,190,138,131,229,201,120,142,179,172,48,77,
+ 234,19,240,33,38,91,93>>.
+
+content_nextiv_mac({3,N}) when N == 0; N == 1 ->
+ {<<"HELLO\n">>,
+ <<72,196,247,97,62,213,222,109,210,204,217,186,172,184, 197,148>>,
+ <<71,136,212,107,223,200,70,232,127,116,148,205,232,35,158,113,237,174,15,217,192,168,35,8,6,107,107,233,25,174,90,111>>};
+content_nextiv_mac(_) ->
+ {<<"HELLO\n">>,
+ <<183,139,16,132,10,209,67,86,168,100,61,217,145,57,36,56>>,
+ <<71,136,212,107,223,200,70,232,127,116,148,205,232,35,158,113,237,174,15,217,192,168,35,8,6,107,107,233,25,174,90,111>>}.
+
+badpad_content_nextiv_mac({3,N}) when N == 0; N == 1 ->
+ {<<"HELLO\n">>,
+ <<225,55,228,10,105,30,190,37,1,88,139,243,210,99,65,41>>,
+ <<183,139,16,132,10,209,67,86,168,100,61,217,145,57,36,56>>
+ };
+badpad_content_nextiv_mac(_) ->
+ {<<"HELLO\n">>,
+ <<133,211,45,189,179,229,56,86,11,178,239,159,14,160,253,140>>,
+ <<183,139,16,132,10,209,67,86,168,100,61,217,145,57,36,56>>
+ }.
+
+badpad_content(Content) ->
+    %% BadContent will fail the MAC check
+ <<16#F0, Content/binary>>.
+
+correct_cipher_state() ->
+ #cipher_state{iv = <<59,201,85,117,188,206,224,136,5,109,46,70,104,79,4,9>>,
+ key = <<72,196,247,97,62,213,222,109,210,204,217,186,172,184,197,148>>}.
+
+incorrect_cipher_state() ->
+ #cipher_state{iv = <<59,201,85,117,188,206,224,136,5,109,46,70,104,79,4,9>>,
+ key = <<72,196,247,97,62,213,222,109,210,204,217,186,172,184,197,254>>}.
+
diff --git a/lib/ssl/test/ssl_pem_cache_SUITE.erl b/lib/ssl/test/ssl_pem_cache_SUITE.erl
new file mode 100644
index 0000000000..843079e2fe
--- /dev/null
+++ b/lib/ssl/test/ssl_pem_cache_SUITE.erl
@@ -0,0 +1,127 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2015-2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+
+-module(ssl_pem_cache_SUITE).
+
+%% Note: This directive should only be used in test suites.
+-compile(export_all).
+
+-include_lib("common_test/include/ct.hrl").
+-include_lib("kernel/include/file.hrl").
+
+-define(CLEANUP_INTERVAL, 5000).
+
+%%--------------------------------------------------------------------
+%% Common Test interface functions -----------------------------------
+%%--------------------------------------------------------------------
+all() ->
+ [pem_cleanup].
+
+groups() ->
+ [].
+
+init_per_suite(Config0) ->
+ catch crypto:stop(),
+ try crypto:start() of
+ ok ->
+ ssl:start(),
+	    %% make rsa certs using openssl
+ Result =
+ (catch make_certs:all(?config(data_dir, Config0),
+ ?config(priv_dir, Config0))),
+ ct:log("Make certs ~p~n", [Result]),
+
+ Config1 = ssl_test_lib:make_dsa_cert(Config0),
+ ssl_test_lib:cert_options(Config1)
+ catch _:_ ->
+ {skip, "Crypto did not start"}
+ end.
+
+end_per_suite(_Config) ->
+ application:stop(crypto).
+
+init_per_group(_GroupName, Config) ->
+ Config.
+
+end_per_group(_GroupName, Config) ->
+ Config.
+
+init_per_testcase(pem_cleanup, Config) ->
+ ssl:stop(),
+ application:load(ssl),
+ application:set_env(ssl, ssl_pem_cache_clean, ?CLEANUP_INTERVAL),
+ ssl:start(),
+ Config.
+
+end_per_testcase(_TestCase, Config) ->
+ %%ssl:stop(),
+ Config.
+
+%%--------------------------------------------------------------------
+%% Test Cases --------------------------------------------------------
+%%--------------------------------------------------------------------
+pem_cleanup() ->
+    [{doc, "Test the PEM cache invalidation mechanism"}].
+pem_cleanup(Config) when is_list(Config) ->
+ process_flag(trap_exit, true),
+ ClientOpts = ?config(client_opts, Config),
+ ServerOpts = ?config(server_opts, Config),
+ {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
+
+ Server =
+ ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
+ {from, self()},
+ {mfa, {ssl_test_lib, no_result, []}},
+ {options, ServerOpts}]),
+ Port = ssl_test_lib:inet_port(Server),
+ Client =
+ ssl_test_lib:start_client([{node, ClientNode},
+ {port, Port}, {host, Hostname},
+ {mfa, {ssl_test_lib, no_result, []}},
+ {from, self()}, {options, ClientOpts}]),
+
+ Size = ssl_pkix_db:db_size(get_pem_cache()),
+ Certfile = proplists:get_value(certfile, ServerOpts),
+ {ok, FileInfo} = file:read_file_info(Certfile),
+ Time = later(),
+ ok = file:write_file_info(Certfile, FileInfo#file_info{mtime = Time}),
+ ct:sleep(2 * ?CLEANUP_INTERVAL),
+ Size1 = ssl_pkix_db:db_size(get_pem_cache()),
+ ssl_test_lib:close(Server),
+ ssl_test_lib:close(Client),
+ false = Size == Size1.
+
+get_pem_cache() ->
+ {status, _, _, StatusInfo} = sys:get_status(whereis(ssl_manager)),
+ [_, _,_, _, Prop] = StatusInfo,
+ State = ssl_test_lib:state(Prop),
+ case element(5, State) of
+	[_CertDb, _FileRefDb, PemCache] ->
+	    PemCache;
+ _ ->
+ undefined
+ end.
+
+later()->
+ DateTime = calendar:now_to_local_time(os:timestamp()),
+ Gregorian = calendar:datetime_to_gregorian_seconds(DateTime),
+ calendar:gregorian_seconds_to_datetime(Gregorian + (2 * ?CLEANUP_INTERVAL)).
+
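
pem_cleanup/1 above drives the new invalidation path by pushing the certificate file's mtime forward and waiting two cleanup intervals. The same idea outside a test, sketched with a placeholder file name:

    %% Sketch: bump a PEM file's mtime so the next ssl_manager cleanup
    %% pass sees it as newer than the last checkpoint and evicts it,
    %% forcing a re-read on the next use.
    -module(touch_pem_example).
    -include_lib("kernel/include/file.hrl").
    -export([touch/1]).

    touch(PemFile) ->
        {ok, Info} = file:read_file_info(PemFile),
        ok = file:write_file_info(PemFile, Info#file_info{mtime = calendar:local_time()}).
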
diff --git a/lib/ssl/test/ssl_test_lib.erl b/lib/ssl/test/ssl_test_lib.erl
index d2e6e41482..d6fbb73249 100644
--- a/lib/ssl/test/ssl_test_lib.erl
+++ b/lib/ssl/test/ssl_test_lib.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -187,6 +187,7 @@ run_client(Opts) ->
Transport = proplists:get_value(transport, Opts, ssl),
Options = proplists:get_value(options, Opts),
ct:log("~p:~p~n~p:connect(~p, ~p)@~p~n", [?MODULE,?LINE, Transport, Host, Port, Node]),
+ ct:log("SSLOpts: ~p", [Options]),
case rpc:call(Node, Transport, connect, [Host, Port, Options]) of
{ok, Socket} ->
Pid ! {connected, Socket},
@@ -918,6 +919,10 @@ srp_dss_suites() ->
{srp_dss, aes_256_cbc, sha}],
ssl_cipher:filter_suites(Suites).
+rc4_suites(Version) ->
+ Suites = ssl_cipher:rc4_suites(Version),
+ ssl_cipher:filter_suites(Suites).
+
pem_to_der(File) ->
{ok, PemBin} = file:read_file(File),
public_key:pem_decode(PemBin).
@@ -1125,7 +1130,8 @@ filter_suites(Ciphers0) ->
Supported0 = ssl_cipher:suites(Version)
++ ssl_cipher:anonymous_suites(Version)
++ ssl_cipher:psk_suites(Version)
- ++ ssl_cipher:srp_suites(),
+ ++ ssl_cipher:srp_suites()
+ ++ ssl_cipher:rc4_suites(Version),
Supported1 = ssl_cipher:filter_suites(Supported0),
Supported2 = [ssl:suite_definition(S) || S <- Supported1],
[Cipher || Cipher <- Ciphers0, lists:member(Cipher, Supported2)].
diff --git a/lib/ssl/test/ssl_upgrade_SUITE.erl b/lib/ssl/test/ssl_upgrade_SUITE.erl
new file mode 100644
index 0000000000..6a6a1b4a7a
--- /dev/null
+++ b/lib/ssl/test/ssl_upgrade_SUITE.erl
@@ -0,0 +1,162 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2014-2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(ssl_upgrade_SUITE).
+
+%% Note: This directive should only be used in test suites.
+-compile(export_all).
+
+-include_lib("common_test/include/ct.hrl").
+
+-record(state, {
+ config,
+ server,
+ client,
+ soft
+ }).
+
+all() ->
+ [
+ minor_upgrade,
+ major_upgrade
+ ].
+
+init_per_suite(Config0) ->
+ catch crypto:stop(),
+ try crypto:start() of
+ ok ->
+ case ct_release_test:init(Config0) of
+ {skip, Reason} ->
+ {skip, Reason};
+ Config ->
+ Result =
+ (catch make_certs:all(?config(data_dir, Config),
+ ?config(priv_dir, Config))),
+ ct:log("Make certs ~p~n", [Result]),
+ ssl_test_lib:cert_options(Config)
+ end
+ catch _:_ ->
+ {skip, "Crypto did not start"}
+ end.
+
+end_per_suite(Config) ->
+ ct_release_test:cleanup(Config),
+ crypto:stop().
+
+init_per_testcase(_TestCase, Config) ->
+ Config.
+end_per_testcase(_TestCase, Config) ->
+ Config.
+
+major_upgrade(Config) when is_list(Config) ->
+ ct_release_test:upgrade(ssl, major,{?MODULE, #state{config = Config}}, Config).
+
+minor_upgrade(Config) when is_list(Config) ->
+ ct_release_test:upgrade(ssl, minor,{?MODULE, #state{config = Config}}, Config).
+
+upgrade_init(CTData, #state{config = Config} = State) ->
+ {ok, {_, _, Up, _Down}} = ct_release_test:get_appup(CTData, ssl),
+ ct:pal("Up: ~p", [Up]),
+    Soft = is_soft(Up), %% It is symmetrical: if the upgrade is soft, so is the downgrade
+ case Soft of
+ true ->
+ {Server, Client} = soft_start_connection(Config),
+ State#state{server = Server, client = Client,
+ soft = Soft};
+ false ->
+ State#state{soft = Soft}
+ end.
+
+upgrade_upgraded(_, #state{soft = false, config = Config} = State) ->
+ {Server, Client} = restart_start_connection(Config),
+ ssl_test_lib:check_result(Server, ok, Client, ok),
+ ssl_test_lib:close(Server),
+ ssl_test_lib:close(Client),
+ State;
+
+upgrade_upgraded(_, #state{server = Server0, client = Client0,
+ config = Config, soft = true} = State) ->
+ Server0 ! changed_version,
+ Client0 ! changed_version,
+ ssl_test_lib:check_result(Server0, ok, Client0, ok),
+ ssl_test_lib:close(Server0),
+ ssl_test_lib:close(Client0),
+ {Server, Client} = soft_start_connection(Config),
+ State#state{server = Server, client = Client}.
+
+upgrade_downgraded(_, #state{soft = false, config = Config} = State) ->
+ {Server, Client} = restart_start_connection(Config),
+ ssl_test_lib:check_result(Server, ok, Client, ok),
+ ssl_test_lib:close(Server),
+ ssl_test_lib:close(Client),
+ State;
+
+upgrade_downgraded(_, #state{server = Server, client = Client, soft = true} = State) ->
+ Server ! changed_version,
+ Client ! changed_version,
+ ssl_test_lib:check_result(Server, ok, Client, ok),
+ ssl_test_lib:close(Server),
+ ssl_test_lib:close(Client),
+ State.
+
+use_connection(Socket) ->
+ ssl_test_lib:send_recv_result_active(Socket),
+ receive
+ changed_version ->
+ ssl_test_lib:send_recv_result_active(Socket)
+ end.
+
+soft_start_connection(Config) ->
+ ClientOpts = ?config(client_verification_opts, Config),
+ ServerOpts = ?config(server_verification_opts, Config),
+ {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
+ Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
+ {from, self()},
+ {mfa, {?MODULE, use_connection, []}},
+ {options, ServerOpts}]),
+
+ Port = ssl_test_lib:inet_port(Server),
+ Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
+ {host, Hostname},
+ {from, self()},
+ {mfa, {?MODULE, use_connection, []}},
+ {options, ClientOpts}]),
+ {Server, Client}.
+
+restart_start_connection(Config) ->
+ ClientOpts = ?config(client_verification_opts, Config),
+ ServerOpts = ?config(server_verification_opts, Config),
+ {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
+ Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
+ {from, self()},
+ {mfa, {ssl_test_lib, send_recv_result_active, []}},
+ {options, ServerOpts}]),
+
+ Port = ssl_test_lib:inet_port(Server),
+ Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
+ {host, Hostname},
+ {from, self()},
+ {mfa, {ssl_test_lib, send_recv_result_active, []}},
+ {options, ClientOpts}]),
+ {Server, Client}.
+
+is_soft([{restart_application, ssl}]) ->
+ false;
+is_soft(_) ->
+ true.
+
diff --git a/lib/ssl/vsn.mk b/lib/ssl/vsn.mk
index bda974da0e..171147adf2 100644
--- a/lib/ssl/vsn.mk
+++ b/lib/ssl/vsn.mk
@@ -1 +1 @@
-SSL_VSN = 5.3.8
+SSL_VSN = 7.0
diff --git a/lib/stdlib/doc/src/math.xml b/lib/stdlib/doc/src/math.xml
index 43cd20e726..7cfc8a1175 100644
--- a/lib/stdlib/doc/src/math.xml
+++ b/lib/stdlib/doc/src/math.xml
@@ -67,6 +67,7 @@
<name name="atanh" arity="1"/>
<name name="exp" arity="1"/>
<name name="log" arity="1"/>
+ <name name="log2" arity="1"/>
<name name="log10" arity="1"/>
<name name="pow" arity="2"/>
<name name="sqrt" arity="1"/>
diff --git a/lib/stdlib/src/c.erl b/lib/stdlib/src/c.erl
index c2256c0cf9..9860adf04d 100644
--- a/lib/stdlib/src/c.erl
+++ b/lib/stdlib/src/c.erl
@@ -509,9 +509,12 @@ m(M) ->
{exports,E} = lists:keyfind(exports, 1, L),
Time = get_compile_time(L),
COpts = get_compile_options(L),
- format("Module ~w compiled: ",[M]), print_time(Time),
- format("Compiler options: ~p~n", [COpts]),
+ format("Module: ~w~n", [M]),
+ print_md5(L),
+ format("Compiled: "),
+ print_time(Time),
print_object_file(M),
+ format("Compiler options: ~p~n", [COpts]),
format("Exports: ~n",[]), print_exports(keysort(1, E)).
print_object_file(Mod) ->
@@ -522,6 +525,12 @@ print_object_file(Mod) ->
ignore
end.
+print_md5(L) ->
+ case lists:keyfind(md5, 1, L) of
+ {md5,<<MD5:128>>} -> io:format("MD5: ~.16b~n",[MD5]);
+ _ -> ok
+ end.
+
get_compile_time(L) ->
case get_compile_info(L, time) of
{ok,Val} -> Val;
@@ -569,8 +578,8 @@ split_print_exports([{F1, A1}|T1], [{F2, A2} | T2]) ->
split_print_exports([], []) -> ok.
print_time({Year,Month,Day,Hour,Min,_Secs}) ->
- format("Date: ~s ~w ~w, ", [month(Month),Day,Year]),
- format("Time: ~.2.0w.~.2.0w~n", [Hour,Min]);
+ format("~s ~w ~w, ", [month(Month),Day,Year]),
+ format("~.2.0w:~.2.0w~n", [Hour,Min]);
print_time(notime) ->
format("No compile time info available~n",[]).
diff --git a/lib/stdlib/src/dict.erl b/lib/stdlib/src/dict.erl
index cf8fb3114a..5a9f63c5e2 100644
--- a/lib/stdlib/src/dict.erl
+++ b/lib/stdlib/src/dict.erl
@@ -417,6 +417,8 @@ on_bucket(F, T, Slot) ->
%% could have implemented map and filter using fold but these are
%% faster. We hope!
+fold_dict(F, Acc, #dict{size=0}) when is_function(F, 3) ->
+ Acc;
fold_dict(F, Acc, D) ->
Segs = D#dict.segs,
fold_segs(F, Acc, Segs, tuple_size(Segs)).
@@ -434,6 +436,8 @@ fold_bucket(F, Acc, [?kv(Key,Val)|Bkt]) ->
fold_bucket(F, F(Key, Val, Acc), Bkt);
fold_bucket(F, Acc, []) when is_function(F, 3) -> Acc.
+map_dict(F, #dict{size=0} = Dict) when is_function(F, 2) ->
+ Dict;
map_dict(F, D) ->
Segs0 = tuple_to_list(D#dict.segs),
Segs1 = map_seg_list(F, Segs0),
@@ -453,6 +457,8 @@ map_bucket(F, [?kv(Key,Val)|Bkt]) ->
[?kv(Key,F(Key, Val))|map_bucket(F, Bkt)];
map_bucket(F, []) when is_function(F, 2) -> [].
+filter_dict(F, #dict{size=0} = Dict) when is_function(F, 2) ->
+ Dict;
filter_dict(F, D) ->
Segs0 = tuple_to_list(D#dict.segs),
{Segs1,Fc} = filter_seg_list(F, Segs0, [], 0),
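
The three new size=0 clauses above let fold, map and filter return immediately for an empty dict instead of walking the segment tuple; results are unchanged. A small sketch exercising both paths:

    %% Sketch: identical results; only the empty-dict case changes path.
    -module(dict_empty_example).
    -export([run/0]).

    run() ->
        Empty = dict:new(),
        0  = dict:fold(fun(_K, V, Acc) -> Acc + V end, 0, Empty),   %% new size=0 clause
        [] = dict:to_list(dict:map(fun(_K, V) -> V * 2 end, Empty)),
        D  = dict:from_list([{a, 1}, {b, 2}]),
        3  = dict:fold(fun(_K, V, Acc) -> Acc + V end, 0, D),       %% regular path
        ok.
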
diff --git a/lib/stdlib/src/erl_lint.erl b/lib/stdlib/src/erl_lint.erl
index 772b0bf642..dabedff0d4 100644
--- a/lib/stdlib/src/erl_lint.erl
+++ b/lib/stdlib/src/erl_lint.erl
@@ -744,6 +744,8 @@ attribute_state(Form, St) ->
%% State'
%% Allow for record, type and opaque type definitions and spec
 %% declarations to be interspersed within function definitions.
+%% Dialyzer attributes are also allowed everywhere, but are not
+%% checked at all.
function_state({attribute,L,record,{Name,Fields}}, St) ->
record_def(L, Name, Fields, St);
@@ -753,6 +755,8 @@ function_state({attribute,L,opaque,{TypeName,TypeDef,Args}}, St) ->
type_def(opaque, L, TypeName, TypeDef, Args, St);
function_state({attribute,L,spec,{Fun,Types}}, St) ->
spec_decl(L, Fun, Types, St);
+function_state({attribute,_L,dialyzer,_Val}, St) ->
+ St;
function_state({attribute,La,Attr,_Val}, St) ->
add_error(La, {attribute,Attr}, St);
function_state({function,L,N,A,Cs}, St) ->
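
The new function_state/2 clause above makes erl_lint accept -dialyzer(...) attributes that appear after function definitions instead of reporting an {attribute,dialyzer} error; their contents are not inspected here. A hedged sketch of a module layout this permits (the attribute value is just one example of the dialyzer attribute syntax):

    %% Sketch: the -dialyzer attribute below sits after a function,
    %% which previously made erl_lint complain.
    -module(dialyzer_attr_example).
    -export([loop/0]).

    loop() ->
        loop().

    -dialyzer({nowarn_function, loop/0}).
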
diff --git a/lib/stdlib/src/math.erl b/lib/stdlib/src/math.erl
index 98a70b1644..43f736e54c 100644
--- a/lib/stdlib/src/math.erl
+++ b/lib/stdlib/src/math.erl
@@ -24,7 +24,7 @@
-export([sin/1, cos/1, tan/1, asin/1, acos/1, atan/1, atan2/2, sinh/1,
cosh/1, tanh/1, asinh/1, acosh/1, atanh/1, exp/1, log/1,
- log10/1, pow/2, sqrt/1, erf/1, erfc/1]).
+ log2/1, log10/1, pow/2, sqrt/1, erf/1, erfc/1]).
-spec acos(X) -> float() when
X :: number().
@@ -92,6 +92,11 @@ exp(_) ->
log(_) ->
erlang:nif_error(undef).
+-spec log2(X) -> float() when
+ X :: number().
+log2(_) ->
+ erlang:nif_error(undef).
+
-spec log10(X) -> float() when
X :: number().
log10(_) ->
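
math:log2/1 above is a new emulator-backed function alongside log/1 and log10/1. A trivial sketch of using it where one previously wrote math:log(N) / math:log(2); the helper names are illustrative:

    %% Sketch: smallest number of bits needed to index N distinct values.
    -module(log2_example).
    -export([bits_needed/1]).

    bits_needed(N) when is_integer(N), N > 1 ->
        ceiling(math:log2(N)).

    %% Integer ceiling of a float, avoiding any later additions to math.
    ceiling(X) ->
        T = trunc(X),
        case X > T of
            true  -> T + 1;
            false -> T
        end.
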
diff --git a/lib/stdlib/src/otp_internal.erl b/lib/stdlib/src/otp_internal.erl
index 0ace87ef5c..4a338798d0 100644
--- a/lib/stdlib/src/otp_internal.erl
+++ b/lib/stdlib/src/otp_internal.erl
@@ -578,6 +578,19 @@ obsolete_1(asn1rt, utf8_binary_to_list, 1) ->
obsolete_1(asn1rt, utf8_list_to_binary, 1) ->
{deprecated,{unicode,characters_to_binary,1}};
+%% Added in OTP 18.
+obsolete_1(core_lib, get_anno, 1) ->
+ {deprecated,{cerl,get_ann,1}};
+obsolete_1(core_lib, set_anno, 2) ->
+ {deprecated,{cerl,set_ann,2}};
+obsolete_1(core_lib, is_literal, 1) ->
+ {deprecated,{cerl,is_literal,1}};
+obsolete_1(core_lib, is_literal_list, 1) ->
+ {deprecated,"deprecated; use lists:all(fun cerl:is_literal/1, L)"
+ " instead"};
+obsolete_1(core_lib, literal_value, 1) ->
+ {deprecated,{core_lib,concrete,1}};
+
obsolete_1(_, _, _) ->
no.
diff --git a/lib/stdlib/src/string.erl b/lib/stdlib/src/string.erl
index f9b083a56d..f6903d1c3d 100644
--- a/lib/stdlib/src/string.erl
+++ b/lib/stdlib/src/string.erl
@@ -221,23 +221,47 @@ substr2([_|String], S) -> substr2(String, S-1).
Tokens :: [Token :: nonempty_string()].
tokens(S, Seps) ->
- tokens1(S, Seps, []).
+ case Seps of
+ [] ->
+ case S of
+ [] -> [];
+ [_|_] -> [S]
+ end;
+ [C] ->
+ tokens_single_1(reverse(S), C, []);
+ [_|_] ->
+ tokens_multiple_1(reverse(S), Seps, [])
+ end.
-tokens1([C|S], Seps, Toks) ->
+tokens_single_1([Sep|S], Sep, Toks) ->
+ tokens_single_1(S, Sep, Toks);
+tokens_single_1([C|S], Sep, Toks) ->
+ tokens_single_2(S, Sep, Toks, [C]);
+tokens_single_1([], _, Toks) ->
+ Toks.
+
+tokens_single_2([Sep|S], Sep, Toks, Tok) ->
+ tokens_single_1(S, Sep, [Tok|Toks]);
+tokens_single_2([C|S], Sep, Toks, Tok) ->
+ tokens_single_2(S, Sep, Toks, [C|Tok]);
+tokens_single_2([], _Sep, Toks, Tok) ->
+ [Tok|Toks].
+
+tokens_multiple_1([C|S], Seps, Toks) ->
case member(C, Seps) of
- true -> tokens1(S, Seps, Toks);
- false -> tokens2(S, Seps, Toks, [C])
+ true -> tokens_multiple_1(S, Seps, Toks);
+ false -> tokens_multiple_2(S, Seps, Toks, [C])
end;
-tokens1([], _Seps, Toks) ->
- reverse(Toks).
+tokens_multiple_1([], _Seps, Toks) ->
+ Toks.
-tokens2([C|S], Seps, Toks, Cs) ->
+tokens_multiple_2([C|S], Seps, Toks, Tok) ->
case member(C, Seps) of
- true -> tokens1(S, Seps, [reverse(Cs)|Toks]);
- false -> tokens2(S, Seps, Toks, [C|Cs])
+ true -> tokens_multiple_1(S, Seps, [Tok|Toks]);
+ false -> tokens_multiple_2(S, Seps, Toks, [C|Tok])
end;
-tokens2([], _Seps, Toks, Cs) ->
- reverse([reverse(Cs)|Toks]).
+tokens_multiple_2([], _Seps, Toks, Tok) ->
+ [Tok|Toks].
-spec chars(Character, Number) -> String when
Character :: char(),
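
The rewrite above gives string:tokens/2 a dedicated single-separator path and builds the result from a reversed copy of the input so no final reverse is needed; the observable behaviour (adjacent, leading and trailing separators collapsed) is unchanged, as the updated string_SUITE checks. A short sketch of those cases:

    %% Sketch: same results as before the rewrite.
    -module(tokens_example).
    -export([run/0]).

    run() ->
        ["1","2 34","45","5","6","7"] = string:tokens("1,2 34,45;5,;6;,7", ";,"),
        ["a","b","c"] = string:tokens(";a;;b;c;", ";"),  %% single-separator path
        ["abc"]       = string:tokens("abc", ""),        %% no separators
        ok.
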
diff --git a/lib/stdlib/test/stdlib_SUITE.erl b/lib/stdlib/test/stdlib_SUITE.erl
index 6669a21b9c..206eb4fd74 100644
--- a/lib/stdlib/test/stdlib_SUITE.erl
+++ b/lib/stdlib/test/stdlib_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -172,9 +172,16 @@ major_upgrade(Config) ->
%% Version numbers are checked by ct_release_test, so there is nothing
%% more to check here...
-upgrade_init(State) ->
+upgrade_init(CtData,State) ->
+ {ok,{FromVsn,ToVsn}} = ct_release_test:get_app_vsns(CtData,stdlib),
+ case ct_release_test:get_appup(CtData,stdlib) of
+ {ok,{FromVsn,ToVsn,[restart_new_emulator],[restart_new_emulator]}} ->
+ io:format("Upgrade/downgrade ~p <--> ~p",[FromVsn,ToVsn]);
+ {error,{vsn_not_found,_}} when FromVsn==ToVsn ->
+ io:format("No upgrade test for stdlib, same version")
+ end,
State.
-upgrade_upgraded(State) ->
+upgrade_upgraded(_CtData,State) ->
State.
-upgrade_downgraded(State) ->
+upgrade_downgraded(_CtData,State) ->
State.
diff --git a/lib/stdlib/test/string_SUITE.erl b/lib/stdlib/test/string_SUITE.erl
index fccd1bef95..a55c710d50 100644
--- a/lib/stdlib/test/string_SUITE.erl
+++ b/lib/stdlib/test/string_SUITE.erl
@@ -217,21 +217,39 @@ substr(Config) when is_list(Config) ->
?line {'EXIT',_} = (catch string:substr("1234", "1")),
ok.
-tokens(suite) ->
- [];
-tokens(doc) ->
- [];
tokens(Config) when is_list(Config) ->
- ?line [] = string:tokens("",""),
- ?line [] = string:tokens("abc","abc"),
- ?line ["abc"] = string:tokens("abc", ""),
- ?line ["1","2 34","4","5"] = string:tokens("1,2 34,4;5", ";,"),
- %% invalid arg type
- ?line {'EXIT',_} = (catch string:tokens('x,y', ",")),
+ [] = string:tokens("",""),
+ [] = string:tokens("abc","abc"),
+ ["abc"] = string:tokens("abc", ""),
+ ["1","2 34","45","5","6","7"] = do_tokens("1,2 34,45;5,;6;,7", ";,"),
+
%% invalid arg type
- ?line {'EXIT',_} = (catch string:tokens("x,y", ',')),
+ {'EXIT',_} = (catch string:tokens('x,y', ",")),
+ {'EXIT',_} = (catch string:tokens("x,y", ',')),
ok.
+do_tokens(S0, Sep0) ->
+ [H|T] = Sep0,
+ S = [replace_sep(C, T, H) || C <- S0],
+ Sep = [H],
+ io:format("~p ~p\n", [S0,Sep0]),
+ io:format("~p ~p\n", [S,Sep]),
+
+ Res = string:tokens(S0, Sep0),
+ Res = string:tokens(Sep0++S0, Sep0),
+ Res = string:tokens(S0++Sep0, Sep0),
+
+ Res = string:tokens(S, Sep),
+ Res = string:tokens(Sep++S, Sep),
+ Res = string:tokens(S++Sep, Sep),
+
+ Res.
+
+replace_sep(C, Seps, New) ->
+ case lists:member(C, Seps) of
+ true -> New;
+ false -> C
+ end.
chars(suite) ->
[];
diff --git a/lib/syntax_tools/src/epp_dodger.erl b/lib/syntax_tools/src/epp_dodger.erl
index 7e12eab1b5..3ca7a8197e 100644
--- a/lib/syntax_tools/src/epp_dodger.erl
+++ b/lib/syntax_tools/src/epp_dodger.erl
@@ -88,7 +88,7 @@
%% This is a so-called Erlang I/O ErrorInfo structure; see the {@link
%% //stdlib/io} module for details.
--type errorinfo() :: term(). % {integer(), atom(), term()}.
+-type errorinfo() :: {integer(), atom(), term()}.
-type option() :: atom() | {atom(), term()}.
@@ -208,8 +208,8 @@ do_parse_file(DefEncoding, File, Parser, Options) ->
try Parser(Dev, 1, Options)
after ok = file:close(Dev)
end;
- {error, _} = Error ->
- Error
+ {error, Error} ->
+ {error, {0, file, Error}} % defer to file:format_error/1
end.
find_invalid_unicode([H|T]) ->
diff --git a/lib/syntax_tools/src/erl_prettypr.erl b/lib/syntax_tools/src/erl_prettypr.erl
index 877675772f..81272e62de 100644
--- a/lib/syntax_tools/src/erl_prettypr.erl
+++ b/lib/syntax_tools/src/erl_prettypr.erl
@@ -50,8 +50,7 @@
| fun((erl_syntax:syntaxTree(), _, _) -> prettypr:document()).
-type clause_t() :: 'case_expr' | 'cond_expr' | 'fun_expr'
| 'if_expr' | 'receive_expr' | 'try_expr'
- | {'function', prettypr:document()}
- | {'rule', prettypr:document()}.
+ | {'function', prettypr:document()}.
-record(ctxt, {prec = 0 :: integer(),
sub_indent = 2 :: non_neg_integer(),
@@ -587,8 +586,6 @@ lay_2(Node, Ctxt) ->
make_case_clause(D1, D2, D3, Ctxt);
try_expr ->
make_case_clause(D1, D2, D3, Ctxt);
- {rule, N} ->
- make_rule_clause(N, D1, D2, D3, Ctxt);
undefined ->
%% If a clause is formatted out of context, we
%% use a "fun-expression" clause style.
@@ -851,14 +848,10 @@ lay_2(Node, Ctxt) ->
floating(text(".")),
lay(erl_syntax:record_access_field(Node),
set_prec(Ctxt, PrecR))),
- D3 = case erl_syntax:record_access_type(Node) of
- none ->
- D2;
- T ->
- beside(beside(floating(text("#")),
- lay(T, reset_prec(Ctxt))),
- D2)
- end,
+ T = erl_syntax:record_access_type(Node),
+ D3 = beside(beside(floating(text("#")),
+ lay(T, reset_prec(Ctxt))),
+ D2),
maybe_parentheses(beside(D1, D3), Prec, Ctxt);
record_expr ->
@@ -926,15 +919,6 @@ lay_2(Node, Ctxt) ->
D2 = lay(erl_syntax:map_field_exact_value(Node), Ctxt1),
par([D1, floating(text(":=")), D2], Ctxt1#ctxt.break_indent);
- rule ->
- %% Comments on the name will be repeated; cf.
- %% `function'.
- Ctxt1 = reset_prec(Ctxt),
- D1 = lay(erl_syntax:rule_name(Node), Ctxt1),
- D2 = lay_clauses(erl_syntax:rule_clauses(Node),
- {rule, D1}, Ctxt1),
- beside(D2, floating(text(".")));
-
size_qualifier ->
Ctxt1 = set_prec(Ctxt, max_prec()),
D1 = lay(erl_syntax:size_qualifier_body(Node), Ctxt1),
@@ -1073,10 +1057,6 @@ make_fun_clause_head(N, P, Ctxt) ->
beside(N, D)
end.
-make_rule_clause(N, P, G, B, Ctxt) ->
- D = make_fun_clause_head(N, P, Ctxt),
- append_rule_body(B, append_guard(G, D, Ctxt), Ctxt).
-
make_case_clause(P, G, B, Ctxt) ->
append_clause_body(B, append_guard(G, P, Ctxt), Ctxt).
@@ -1092,9 +1072,6 @@ make_if_clause(_P, G, B, Ctxt) ->
append_clause_body(B, D, Ctxt) ->
append_clause_body(B, D, floating(text(" ->")), Ctxt).
-append_rule_body(B, D, Ctxt) ->
- append_clause_body(B, D, floating(text(" :-")), Ctxt).
-
append_clause_body(B, D, S, Ctxt) ->
sep([beside(D, S), nest(Ctxt#ctxt.break_indent, B)]).
diff --git a/lib/syntax_tools/src/erl_syntax.erl b/lib/syntax_tools/src/erl_syntax.erl
index de271d7f2f..3f2a3e05dd 100644
--- a/lib/syntax_tools/src/erl_syntax.erl
+++ b/lib/syntax_tools/src/erl_syntax.erl
@@ -254,7 +254,6 @@
receive_expr_action/1,
receive_expr_clauses/1,
receive_expr_timeout/1,
- record_access/2,
record_access/3,
record_access_argument/1,
record_access_field/1,
@@ -271,10 +270,6 @@
record_index_expr/2,
record_index_expr_field/1,
record_index_expr_type/1,
- rule/2,
- rule_arity/1,
- rule_clauses/1,
- rule_name/1,
size_qualifier/2,
size_qualifier_argument/1,
size_qualifier_body/1,
@@ -472,19 +467,16 @@
%% <td>record_field</td>
%% </tr><tr>
%% <td>record_index_expr</td>
-%% <td>rule</td>
%% <td>size_qualifier</td>
%% <td>string</td>
-%% </tr><tr>
%% <td>text</td>
+%% </tr><tr>
%% <td>try_expr</td>
%% <td>tuple</td>
%% <td>underscore</td>
-%% </tr><tr>
%% <td>variable</td>
+%% </tr><tr>
%% <td>warning_marker</td>
-%% <td></td>
-%% <td></td>
%% </tr>
%% </table></center>
%%
@@ -540,7 +532,6 @@
%% @see record_expr/2
%% @see record_field/2
%% @see record_index_expr/2
-%% @see rule/2
%% @see size_qualifier/2
%% @see string/1
%% @see text/1
@@ -607,10 +598,8 @@ type(Node) ->
{record, _, _, _, _} -> record_expr;
{record, _, _, _} -> record_expr;
{record_field, _, _, _, _} -> record_access;
- {record_field, _, _, _} -> record_access;
{record_index, _, _, _} -> record_index_expr;
{remote, _, _, _} -> module_qualifier;
- {rule, _, _, _, _} -> rule;
{'try', _, _, _, _, _} -> try_expr;
{tuple, _, _} -> tuple;
_ ->
@@ -693,10 +682,9 @@ is_leaf(Node) ->
%% <td>`comment'</td>
%% <td>`error_marker'</td>
%% <td>`eof_marker'</td>
-%% <td>`form_list'</td>
%% </tr><tr>
+%% <td>`form_list'</td>
%% <td>`function'</td>
-%% <td>`rule'</td>
%% <td>`warning_marker'</td>
%% <td>`text'</td>
%% </tr>
@@ -709,7 +697,6 @@ is_leaf(Node) ->
%% @see error_marker/1
%% @see form_list/1
%% @see function/2
-%% @see rule/2
%% @see warning_marker/1
-spec is_form(syntaxTree()) -> boolean().
@@ -722,7 +709,6 @@ is_form(Node) ->
eof_marker -> true;
error_marker -> true;
form_list -> true;
- rule -> true;
warning_marker -> true;
text -> true;
_ -> false
@@ -3480,7 +3466,6 @@ module_qualifier_body(Node) ->
%% @see function_clauses/1
%% @see function_arity/1
%% @see is_form/1
-%% @see rule/2
%% Don't use the name 'function' for this record, to avoid confusion with
%% the tuples on the form {function,Name,Arity} used by erl_parse.
@@ -4310,49 +4295,32 @@ record_index_expr_field(Node) ->
%% =====================================================================
-%% @equiv record_access(Argument, none, Field)
-
--spec record_access(syntaxTree(), syntaxTree()) -> syntaxTree().
-
-record_access(Argument, Field) ->
- record_access(Argument, none, Field).
-
-
-%% =====================================================================
-%% @doc Creates an abstract record field access expression. If
-%% `Type' is not `none', the result represents
-%% "<code><em>Argument</em>#<em>Type</em>.<em>Field</em></code>".
-%%
-%% If `Type' is `none', the result represents
-%% "<code><em>Argument</em>.<em>Field</em></code>". This is a special
-%% form only allowed within Mnemosyne queries.
+%% @doc Creates an abstract record field access expression. The result
+%% represents "<code><em>Argument</em>#<em>Type</em>.<em>Field</em></code>".
%%
-%% @see record_access/2
%% @see record_access_argument/1
%% @see record_access_type/1
%% @see record_access_field/1
%% @see record_expr/3
-record(record_access, {argument :: syntaxTree(),
- type :: 'none' | syntaxTree(),
+ type :: syntaxTree(),
field :: syntaxTree()}).
%% type(Node) = record_access
%% data(Node) = #record_access{argument :: Argument, type :: Type,
%% field :: Field}
%%
-%% Argument = Field = syntaxTree()
-%% Type = none | syntaxTree()
+%% Argument = Type = Field = syntaxTree()
%%
%% `erl_parse' representation:
%%
%% {record_field, Pos, Argument, Type, Field}
-%% {record_field, Pos, Argument, Field}
%%
%% Argument = Field = erl_parse()
%% Type = atom()
--spec record_access(syntaxTree(), 'none' | syntaxTree(), syntaxTree()) ->
+-spec record_access(syntaxTree(), syntaxTree(), syntaxTree()) ->
syntaxTree().
record_access(Argument, Type, Field) ->
@@ -4365,16 +4333,11 @@ revert_record_access(Node) ->
Argument = record_access_argument(Node),
Type = record_access_type(Node),
Field = record_access_field(Node),
- if Type =:= none ->
- {record_field, Pos, Argument, Field};
- true ->
- case type(Type) of
- atom ->
- {record_field, Pos,
- Argument, concrete(Type), Field};
- _ ->
- Node
- end
+ case type(Type) of
+ atom ->
+ {record_field, Pos, Argument, concrete(Type), Field};
+ _ ->
+ Node
end.
@@ -4387,8 +4350,6 @@ revert_record_access(Node) ->
record_access_argument(Node) ->
case unwrap(Node) of
- {record_field, _, Argument, _} ->
- Argument;
{record_field, _, Argument, _, _} ->
Argument;
Node1 ->
@@ -4397,21 +4358,14 @@ record_access_argument(Node) ->
%% =====================================================================
-%% @doc Returns the type subtree of a `record_access' node,
-%% if any. If `Node' represents
-%% "<code><em>Argument</em>.<em>Field</em></code>", `none'
-%% is returned, otherwise if `Node' represents
-%% "<code><em>Argument</em>#<em>Type</em>.<em>Field</em></code>",
-%% `Type' is returned.
+%% @doc Returns the type subtree of a `record_access' node.
%%
%% @see record_access/3
--spec record_access_type(syntaxTree()) -> 'none' | syntaxTree().
+-spec record_access_type(syntaxTree()) -> syntaxTree().
record_access_type(Node) ->
case unwrap(Node) of
- {record_field, _, _, _} ->
- none;
{record_field, Pos, _, Type, _} ->
set_pos(atom(Type), Pos);
Node1 ->
@@ -4428,8 +4382,6 @@ record_access_type(Node) ->
record_access_field(Node) ->
case unwrap(Node) of
- {record_field, _, _, Field} ->
- Field;
{record_field, _, _, _, Field} ->
Field;
Node1 ->
@@ -4808,117 +4760,6 @@ binary_comp_body(Node) ->
%% =====================================================================
-%% @doc Creates an abstract Mnemosyne rule. If `Clauses' is
-%% `[C1, ..., Cn]', the results represents
-%% "<code><em>Name</em> <em>C1</em>; ...; <em>Name</em>
-%% <em>Cn</em>.</code>". More exactly, if each `Ci'
-%% represents "<code>(<em>Pi1</em>, ..., <em>Pim</em>) <em>Gi</em> ->
-%% <em>Bi</em></code>", then the result represents
-%% "<code><em>Name</em>(<em>P11</em>, ..., <em>P1m</em>) <em>G1</em> :-
-%% <em>B1</em>; ...; <em>Name</em>(<em>Pn1</em>, ..., <em>Pnm</em>)
-%% <em>Gn</em> :- <em>Bn</em>.</code>". Rules are source code forms.
-%%
-%% @see rule_name/1
-%% @see rule_clauses/1
-%% @see rule_arity/1
-%% @see is_form/1
-%% @see function/2
-
--record(rule, {name :: syntaxTree(), clauses :: [syntaxTree()]}).
-
-%% type(Node) = rule
-%% data(Node) = #rule{name :: Name, clauses :: Clauses}
-%%
-%% Name = syntaxTree()
-%% Clauses = [syntaxTree()]
-%%
-%% (See `function' for notes on why the arity is not stored.)
-%%
-%% `erl_parse' representation:
-%%
-%% {rule, Pos, Name, Arity, Clauses}
-%%
-%% Name = atom()
-%% Arity = integer()
-%% Clauses = [Clause] \ []
-%% Clause = {clause, ...}
-%%
-%% where the number of patterns in each clause should be equal to
-%% the integer `Arity'; see `clause' for documentation on
-%% `erl_parse' clauses.
-
--spec rule(syntaxTree(), [syntaxTree()]) -> syntaxTree().
-
-rule(Name, Clauses) ->
- tree(rule, #rule{name = Name, clauses = Clauses}).
-
-revert_rule(Node) ->
- Name = rule_name(Node),
- Clauses = [revert_clause(C) || C <- rule_clauses(Node)],
- Pos = get_pos(Node),
- case type(Name) of
- atom ->
- A = rule_arity(Node),
- {rule, Pos, concrete(Name), A, Clauses};
- _ ->
- Node
- end.
-
-
-%% =====================================================================
-%% @doc Returns the name subtree of a `rule' node.
-%%
-%% @see rule/2
-
--spec rule_name(syntaxTree()) -> syntaxTree().
-
-rule_name(Node) ->
- case unwrap(Node) of
- {rule, Pos, Name, _, _} ->
- set_pos(atom(Name), Pos);
- Node1 ->
- (data(Node1))#rule.name
- end.
-
-%% =====================================================================
-%% @doc Returns the list of clause subtrees of a `rule' node.
-%%
-%% @see rule/2
-
--spec rule_clauses(syntaxTree()) -> [syntaxTree()].
-
-rule_clauses(Node) ->
- case unwrap(Node) of
- {rule, _, _, _, Clauses} ->
- Clauses;
- Node1 ->
- (data(Node1))#rule.clauses
- end.
-
-%% =====================================================================
-%% @doc Returns the arity of a `rule' node. The result is the
-%% number of parameter patterns in the first clause of the rule;
-%% subsequent clauses are ignored.
-%%
-%% An exception is thrown if `rule_clauses(Node)' returns
-%% an empty list, or if the first element of that list is not a syntax
-%% tree `C' of type `clause' such that
-%% `clause_patterns(C)' is a nonempty list.
-%%
-%% @see rule/2
-%% @see rule_clauses/1
-%% @see clause/3
-%% @see clause_patterns/1
-
--spec rule_arity(syntaxTree()) -> arity().
-
-rule_arity(Node) ->
- %% Note that this never accesses the arity field of
- %% `erl_parse' rule nodes.
- length(clause_patterns(hd(rule_clauses(Node)))).
-
-
-%% =====================================================================
%% @doc Creates an abstract generator. The result represents
%% "<code><em>Pattern</em> &lt;- <em>Body</em></code>".
%%
@@ -6403,8 +6244,6 @@ revert_root(Node) ->
revert_record_expr(Node);
record_index_expr ->
revert_record_index_expr(Node);
- rule ->
- revert_rule(Node);
string ->
revert_string(Node);
try_expr ->
@@ -6661,15 +6500,9 @@ subtrees(T) ->
receive_expr_action(T)]
end;
record_access ->
- case record_access_type(T) of
- none ->
- [[record_access_argument(T)],
- [record_access_field(T)]];
- R ->
- [[record_access_argument(T)],
- [R],
- [record_access_field(T)]]
- end;
+ [[record_access_argument(T)],
+ [record_access_type(T)],
+ [record_access_field(T)]];
record_expr ->
case record_expr_argument(T) of
none ->
@@ -6690,8 +6523,6 @@ subtrees(T) ->
record_index_expr ->
[[record_index_expr_type(T)],
[record_index_expr_field(T)]];
- rule ->
- [[rule_name(T)], rule_clauses(T)];
size_qualifier ->
[[size_qualifier_body(T)],
[size_qualifier_argument(T)]];
@@ -6786,8 +6617,6 @@ make_tree(parentheses, [[E]]) -> parentheses(E);
make_tree(prefix_expr, [[F], [A]]) -> prefix_expr(F, A);
make_tree(receive_expr, [C]) -> receive_expr(C);
make_tree(receive_expr, [C, [E], A]) -> receive_expr(C, E, A);
-make_tree(record_access, [[E], [F]]) ->
- record_access(E, F);
make_tree(record_access, [[E], [T], [F]]) ->
record_access(E, T, F);
make_tree(record_expr, [[T], F]) -> record_expr(T, F);
@@ -6796,7 +6625,6 @@ make_tree(record_field, [[N]]) -> record_field(N);
make_tree(record_field, [[N], [E]]) -> record_field(N, E);
make_tree(record_index_expr, [[T], [F]]) ->
record_index_expr(T, F);
-make_tree(rule, [[N], C]) -> rule(N, C);
make_tree(size_qualifier, [[N], [A]]) -> size_qualifier(N, A);
make_tree(try_expr, [B, C, H, A]) -> try_expr(B, C, H, A);
make_tree(tuple, [E]) -> tuple(E).
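
With record_access/2 and the rule node type removed above, a record field
access is always built with the three-argument constructor. A small sketch,
assuming syntax_tools is on the code path (names are illustrative):

    %% Builds the abstract syntax for "Rec#r.f" and pretty-prints it.
    build_access() ->
        Arg  = erl_syntax:variable('Rec'),
        Type = erl_syntax:atom(r),
        Fld  = erl_syntax:atom(f),
        Node = erl_syntax:record_access(Arg, Type, Fld),
        erl_prettypr:format(Node).   % expected: "Rec#r.f"
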
diff --git a/lib/syntax_tools/src/erl_syntax_lib.erl b/lib/syntax_tools/src/erl_syntax_lib.erl
index 2f0488abec..5b5b18d15b 100644
--- a/lib/syntax_tools/src/erl_syntax_lib.erl
+++ b/lib/syntax_tools/src/erl_syntax_lib.erl
@@ -35,8 +35,7 @@
analyze_function_name/1, analyze_implicit_fun/1,
analyze_import_attribute/1, analyze_module_attribute/1,
analyze_record_attribute/1, analyze_record_expr/1,
- analyze_record_field/1, analyze_rule/1,
- analyze_wild_attribute/1, annotate_bindings/1,
+ analyze_record_field/1, analyze_wild_attribute/1, annotate_bindings/1,
annotate_bindings/2, fold/3, fold_subtrees/3, foldl_listlist/3,
function_name_expansions/1, is_fail_expr/1, limit/2, limit/3,
map/2, map_subtrees/2, mapfold/3, mapfold_subtrees/3,
@@ -527,8 +526,6 @@ vann(Tree, Env) ->
vann_try_expr(Tree, Env);
function ->
vann_function(Tree, Env);
- rule ->
- vann_rule(Tree, Env);
fun_expr ->
vann_fun_expr(Tree, Env);
list_comp ->
@@ -569,15 +566,6 @@ vann_function(Tree, Env) ->
Bound = [],
{ann_bindings(Tree1, Env, Bound, Free), Bound, Free}.
-vann_rule(Tree, Env) ->
- Cs = erl_syntax:rule_clauses(Tree),
- {Cs1, {_, Free}} = vann_clauses(Cs, Env),
- N = erl_syntax:rule_name(Tree),
- {N1, _, _} = vann(N, Env),
- Tree1 = rewrite(Tree, erl_syntax:rule(N1, Cs1)),
- Bound = [],
- {ann_bindings(Tree1, Env, Bound, Free), Bound, Free}.
-
vann_fun_expr(Tree, Env) ->
Cs = erl_syntax:fun_expr_clauses(Tree),
{Cs1, {_, Free}} = vann_clauses(Cs, Env),
@@ -946,7 +934,7 @@ is_fail_expr(E) ->
%%
%% Forms = syntaxTree() | [syntaxTree()]
%% Key = attributes | errors | exports | functions | imports
-%% | module | records | rules | warnings
+%% | module | records | warnings
%%
%% @doc Analyzes a sequence of "program forms". The given
%% `Forms' may be a single syntax tree of type
@@ -1047,16 +1035,6 @@ is_fail_expr(E) ->
%% that each record name occurs at most once in the list. The
%% order of listing is not defined.</dd>
%%
-%% <dt>`{rules, Rules}'</dt>
-%% <dd><ul>
-%% <li>`Rules = [{atom(), integer()}]'</li>
-%% </ul>
-%% `Rules' is a list of the names of the rules that are
-%% defined in `Forms' (cf.
-%% `analyze_rule/1'). We do not guarantee that each
-%% name occurs at most once in the list. The order of listing is
-%% not defined.</dd>
-%%
%% <dt>`{warnings, Warnings}'</dt>
%% <dd><ul>
%% <li>`Warnings = [term()]'</li>
@@ -1074,12 +1052,11 @@ is_fail_expr(E) ->
%% @see analyze_import_attribute/1
%% @see analyze_record_attribute/1
%% @see analyze_function/1
-%% @see analyze_rule/1
%% @see erl_syntax:error_marker_info/1
%% @see erl_syntax:warning_marker_info/1
-type key() :: 'attributes' | 'errors' | 'exports' | 'functions' | 'imports'
- | 'module' | 'records' | 'rules' | 'warnings'.
+ | 'module' | 'records' | 'warnings'.
-type info_pair() :: {key(), term()}.
-spec analyze_forms(erl_syntax:forms()) -> [info_pair()].
@@ -1099,8 +1076,6 @@ collect_form(Node, Info) ->
Info;
{function, Name} ->
finfo_add_function(Name, Info);
- {rule, Name} ->
- finfo_add_rule(Name, Info);
{error_marker, Data} ->
finfo_add_error(Data, Info);
{warning_marker, Data} ->
@@ -1136,8 +1111,7 @@ collect_attribute(_, {N, V}, Info) ->
records = [] :: [{atom(), [{atom(), field_default()}]}],
errors = [] :: [term()],
warnings = [] :: [term()],
- functions = [] :: [{atom(), arity()}],
- rules = [] :: [{atom(), arity()}]}).
+ functions = [] :: [{atom(), arity()}]}).
-type field_default() :: 'none' | erl_syntax:syntaxTree().
@@ -1183,9 +1157,6 @@ finfo_add_warning(R, Info) ->
finfo_add_function(F, Info) ->
Info#forms{functions = [F | Info#forms.functions]}.
-finfo_add_rule(F, Info) ->
- Info#forms{rules = [F | Info#forms.rules]}.
-
finfo_to_list(Info) ->
[{Key, Value}
|| {Key, {value, Value}} <-
@@ -1197,8 +1168,7 @@ finfo_to_list(Info) ->
{records, list_value(Info#forms.records)},
{errors, list_value(Info#forms.errors)},
{warnings, list_value(Info#forms.warnings)},
- {functions, list_value(Info#forms.functions)},
- {rules, list_value(Info#forms.rules)}
+ {functions, list_value(Info#forms.functions)}
]].
list_value([]) ->
@@ -1229,10 +1199,6 @@ list_value(List) ->
%%
%% <dd>where `Info = analyze_function(Node)'.</dd>
%%
-%% <dt>`{rule, Info}'</dt>
-%%
-%% <dd>where `Info = analyze_rule(Node)'.</dd>
-%%
%% <dt>`{warning_marker, Info}'</dt>
%%
%% <dd>where `Info =
@@ -1245,7 +1211,6 @@ list_value(List) ->
%%
%% @see analyze_attribute/1
%% @see analyze_function/1
-%% @see analyze_rule/1
%% @see erl_syntax:is_form/1
%% @see erl_syntax:error_marker_info/1
%% @see erl_syntax:warning_marker_info/1
@@ -1258,8 +1223,6 @@ analyze_form(Node) ->
{attribute, analyze_attribute(Node)};
function ->
{function, analyze_function(Node)};
- rule ->
- {rule, analyze_rule(Node)};
error_marker ->
{error_marker, erl_syntax:error_marker_info(Node)};
warning_marker ->
@@ -1669,7 +1632,7 @@ analyze_record_attribute_tuple(Node) ->
%% <dt>`record_expr':</dt>
%% <dd>`{atom(), [{atom(), Value}]}'</dd>
%% <dt>`record_access':</dt>
-%% <dd>`{atom(), atom()} | atom()'</dd>
+%% <dd>`{atom(), atom()}'</dd>
%% <dt>`record_index_expr':</dt>
%% <dd>`{atom(), atom()}'</dd>
%% </dl>
@@ -1679,9 +1642,7 @@ analyze_record_attribute_tuple(Node) ->
%% listed in the order they appear. (See
%% `analyze_record_field/1' for details on the field
%% descriptors). For a `record_access' node,
-%% `Info' represents the record name and the field name (or
-%% if the record name is not included, only the field name; this is
-%% allowed only in Mnemosyne-query syntax). For a
+%% `Info' represents the record name and the field name. For a
%% `record_index_expr' node, `Info' represents the
%% record name and the field name.
%%
@@ -1713,18 +1674,14 @@ analyze_record_expr(Node) ->
F = erl_syntax:record_access_field(Node),
case erl_syntax:type(F) of
atom ->
- case erl_syntax:record_access_type(Node) of
- none ->
- {record_access, erl_syntax:atom_value(F)};
- A ->
- case erl_syntax:type(A) of
- atom ->
- {record_access,
- {erl_syntax:atom_value(A),
- erl_syntax:atom_value(F)}};
- _ ->
- throw(syntax_error)
- end
+ A = erl_syntax:record_access_type(Node),
+ case erl_syntax:type(A) of
+ atom ->
+ {record_access,
+ {erl_syntax:atom_value(A),
+ erl_syntax:atom_value(F)}};
+ _ ->
+ throw(syntax_error)
end;
_ ->
throw(syntax_error)
@@ -1835,8 +1792,6 @@ analyze_file_attribute(Node) ->
%% The evaluation throws `syntax_error' if
%% `Node' does not represent a well-formed function
%% definition.
-%%
-%% @see analyze_rule/1
-spec analyze_function(erl_syntax:syntaxTree()) -> {atom(), arity()}.
@@ -1857,37 +1812,6 @@ analyze_function(Node) ->
%% =====================================================================
-%% @spec analyze_rule(Node::syntaxTree()) -> {atom(), integer()}
-%%
-%% @doc Returns the name and arity of a Mnemosyne rule. The result is a
-%% pair `{Name, A}' if `Node' represents a rule
-%% "`Name(<em>P_1</em>, ..., <em>P_A</em>) :- ...'".
-%%
-%% The evaluation throws `syntax_error' if
-%% `Node' does not represent a well-formed Mnemosyne
-%% rule.
-%%
-%% @see analyze_function/1
-
--spec analyze_rule(erl_syntax:syntaxTree()) -> {atom(), arity()}.
-
-analyze_rule(Node) ->
- case erl_syntax:type(Node) of
- rule ->
- N = erl_syntax:rule_name(Node),
- case erl_syntax:type(N) of
- atom ->
- {erl_syntax:atom_value(N),
- erl_syntax:rule_arity(Node)};
- _ ->
- throw(syntax_error)
- end;
- _ ->
- throw(syntax_error)
- end.
-
-
-%% =====================================================================
%% @spec analyze_implicit_fun(Node::syntaxTree()) -> FunctionName
%%
%% FunctionName = atom() | {atom(), integer()}
diff --git a/lib/syntax_tools/src/erl_tidy.erl b/lib/syntax_tools/src/erl_tidy.erl
index 38e0c2099b..db7f0939a3 100644
--- a/lib/syntax_tools/src/erl_tidy.erl
+++ b/lib/syntax_tools/src/erl_tidy.erl
@@ -792,16 +792,11 @@ keep_form(Form, Used, Opts) ->
N = erl_syntax_lib:analyze_function(Form),
case sets:is_element(N, Used) of
false ->
- report_removed_def("function", N, Form, Opts),
- false;
- true ->
- true
- end;
- rule ->
- N = erl_syntax_lib:analyze_rule(Form),
- case sets:is_element(N, Used) of
- false ->
- report_removed_def("rule", N, Form, Opts),
+ {F, A} = N,
+ File = proplists:get_value(file, Opts, ""),
+ report({File, erl_syntax:get_pos(Form),
+ "removing unused function `~w/~w'."},
+ [F, A], Opts),
false;
true ->
true
@@ -823,12 +818,6 @@ keep_form(Form, Used, Opts) ->
true
end.
-report_removed_def(Type, {N, A}, Form, Opts) ->
- File = proplists:get_value(file, Opts, ""),
- report({File, erl_syntax:get_pos(Form),
- "removing unused ~s `~w/~w'."},
- [Type, N, A], Opts).
-
collect_functions(Forms) ->
lists:foldl(
fun (F, {Names, Defs}) ->
@@ -837,10 +826,6 @@ collect_functions(Forms) ->
N = erl_syntax_lib:analyze_function(F),
{sets:add_element(N, Names),
dict:store(N, {F, []}, Defs)};
- rule ->
- N = erl_syntax_lib:analyze_rule(F),
- {sets:add_element(N, Names),
- dict:store(N, {F, []}, Defs)};
_ ->
{Names, Defs}
end
@@ -855,11 +840,6 @@ update_forms([F | Fs], Defs, Imports, Opts) ->
{F1, Fs1} = dict:fetch(N, Defs),
[F1 | lists:reverse(Fs1)] ++ update_forms(Fs, Defs, Imports,
Opts);
- rule ->
- N = erl_syntax_lib:analyze_rule(F),
- {F1, Fs1} = dict:fetch(N, Defs),
- [F1 | lists:reverse(Fs1)] ++ update_forms(Fs, Defs, Imports,
- Opts);
attribute ->
[update_attribute(F, Imports, Opts)
| update_forms(Fs, Defs, Imports, Opts)];
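
The erl_tidy change above folds the removal report into keep_form/3, since
unused definitions can now only be functions. For context, a hedged sketch of
how such a cleanup is typically invoked (file name and option values are
illustrative; see the erl_tidy documentation for the full option list):

    %% Tidies the module in place; unused local functions are dropped unless
    %% {keep_unused, true} is given, and removals are reported when
    %% {verbose, true} is set.
    erl_tidy:file("my_module.erl", [{backups, false}, {verbose, true}]).
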
diff --git a/lib/syntax_tools/src/igor.erl b/lib/syntax_tools/src/igor.erl
index 0420508f2a..eac5af5540 100644
--- a/lib/syntax_tools/src/igor.erl
+++ b/lib/syntax_tools/src/igor.erl
@@ -1713,8 +1713,6 @@ transform(Tree, Env, St) ->
transform_function(Tree, Env, St);
implicit_fun ->
transform_implicit_fun(Tree, Env, St);
- rule ->
- transform_rule(Tree, Env, St);
record_expr ->
transform_record(Tree, Env, St);
record_index_expr ->
@@ -1778,27 +1776,6 @@ renaming_note(Name) ->
rename_atom(Node, Atom) ->
rewrite(Node, erl_syntax:atom(Atom)).
-%% Renaming Mnemosyne rules (just like function definitions)
-
-transform_rule(T, Env, St) ->
- {T1, St1} = default_transform(T, Env, St),
- F = erl_syntax_lib:analyze_rule(T1),
- {V, Text} = case (Env#code.map)(F) of
- F ->
- %% Not renamed
- {none, []};
- {Atom, _Arity} ->
- %% Renamed
- Cs = erl_syntax:rule_clauses(T1),
- N = rename_atom(
- erl_syntax:rule_name(T1),
- Atom),
- T2 = rewrite(T1,
- erl_syntax:rule(N, Cs)),
- {{value, T2}, renaming_note(Atom)}
- end,
- {maybe_modified(V, T1, 2, Text, Env), St1}.
-
%% Renaming "implicit fun" expressions (done quietly).
transform_implicit_fun(T, Env, St) ->
diff --git a/lib/test_server/src/erl2html2.erl b/lib/test_server/src/erl2html2.erl
index b9b45cda25..7cfaa2c325 100644
--- a/lib/test_server/src/erl2html2.erl
+++ b/lib/test_server/src/erl2html2.erl
@@ -22,11 +22,11 @@
%%%------------------------------------------------------------------
-module(erl2html2).
--export([convert/2, convert/3]).
+-export([convert/3, convert/4]).
-convert([], _Dest) -> % Fake clause.
+convert([], _Dest, _InclPath) -> % Fake clause.
ok;
-convert(File, Dest) ->
+convert(File, Dest, InclPath) ->
%% The generated code uses the BGCOLOR attribute in the
%% BODY tag, which wasn't valid until HTML 3.2. Also,
%% good HTML should either override all colour attributes
@@ -48,12 +48,12 @@ convert(File, Dest) ->
"</head>\n\n"
"<body bgcolor=\"white\" text=\"black\""
" link=\"blue\" vlink=\"purple\" alink=\"red\">\n"],
- convert(File, Dest, Header).
+ convert(File, Dest, InclPath, Header).
-convert(File, Dest, Header) ->
+convert(File, Dest, InclPath, Header) ->
%% statistics(runtime),
- case parse_file(File) of
+ case parse_file(File, InclPath) of
{ok,Functions} ->
%% {_, Time1} = statistics(runtime),
%% io:format("Parsed file in ~.2f Seconds.~n",[Time1/1000]),
@@ -92,8 +92,8 @@ convert(File, Dest, Header) ->
%%% Use expanded preprocessor directives if possible (epp). Only if
%%% this fails, fall back on using non-expanded code (epp_dodger).
-parse_file(File) ->
- case epp:open(File, [], []) of
+parse_file(File, InclPath) ->
+ case epp:open(File, InclPath, []) of
{ok,Epp} ->
try parse_preprocessed_file(Epp,File,false) of
Forms ->
@@ -145,13 +145,15 @@ parse_non_preprocessed_file(File) ->
parse_non_preprocessed_file(Epp, File, Location) ->
case epp_dodger:parse_form(Epp, Location) of
{ok,Tree,Location1} ->
- case erl_syntax:revert(Tree) of
+ try erl_syntax:revert(Tree) of
{function,L,F,A,[_|C]} ->
Clauses = [{clause,CL} || {clause,CL,_,_,_} <- C],
[{atom_to_list(F),A,L} | Clauses] ++
parse_non_preprocessed_file(Epp, File, Location1);
_ ->
parse_non_preprocessed_file(Epp, File, Location1)
+ catch
+ _:_ -> parse_non_preprocessed_file(Epp, File, Location1)
end;
{error,_E,Location1} ->
parse_non_preprocessed_file(Epp, File, Location1);
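
erl2html2:convert now takes the include path as an explicit argument and hands
it to epp:open/3, falling back to epp_dodger only when preprocessing fails. A
minimal sketch of the new calls (paths are hypothetical, and this is an
internal test_server module, so treat the API as subject to change):

    %% Three-argument form: default HTML header is generated.
    ok = erl2html2:convert("src/my_SUITE.erl",
                           "logs/my_SUITE.erl.html",
                           ["include"]),
    %% Four-argument form: caller supplies the header, as test_server_ctrl does.
    ok = erl2html2:convert("src/my_SUITE.erl",
                           "logs/my_SUITE.erl.html",
                           ["include"], "<html><body>").
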
diff --git a/lib/test_server/src/test_server.erl b/lib/test_server/src/test_server.erl
index 9192a76a17..1d989ce9c8 100644
--- a/lib/test_server/src/test_server.erl
+++ b/lib/test_server/src/test_server.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -177,68 +177,35 @@ module_names(Beams) ->
do_cover_compile(Modules) ->
cover:start(),
- pmap1(fun(M) -> do_cover_compile1(M) end,lists:usort(Modules)),
+ Sticky = prepare_cover_compile(Modules,[]),
+ R = cover:compile_beam(Modules),
+ [warn_compile(Error) || Error <- R,element(1,Error)=/=ok],
+ [code:stick_mod(M) || M <- Sticky],
ok.
-do_cover_compile1(M) ->
+warn_compile({error,{Reason,Module}}) ->
+ io:fwrite("\nWARNING: Could not cover compile ~ts: ~p\n",
+ [Module,{error,Reason}]).
+
+%% Make sure all modules are loaded and unstick if sticky
+prepare_cover_compile([M|Ms],Sticky) ->
case {code:is_sticky(M),code:is_loaded(M)} of
{true,_} ->
code:unstick_mod(M),
- case cover:compile_beam(M) of
- {ok,_} ->
- ok;
- Error ->
- io:fwrite("\nWARNING: Could not cover compile ~w: ~p\n",
- [M,Error])
- end,
- code:stick_mod(M);
+ prepare_cover_compile(Ms,[M|Sticky]);
{false,false} ->
case code:load_file(M) of
{module,_} ->
- do_cover_compile1(M);
+ prepare_cover_compile([M|Ms],Sticky);
Error ->
- io:fwrite("\nWARNING: Could not load ~w: ~p\n",[M,Error])
+ io:fwrite("\nWARNING: Could not load ~w: ~p\n",[M,Error]),
+ prepare_cover_compile(Ms,Sticky)
end;
{false,_} ->
- case cover:compile_beam(M) of
- {ok,_} ->
- ok;
- Error ->
- io:fwrite("\nWARNING: Could not cover compile ~w: ~p\n",
- [M,Error])
- end
- end.
-
-pmap1(Fun,List) ->
- NTot = length(List),
- NProcs = erlang:system_info(schedulers) * 2,
- NPerProc = (NTot div NProcs) + 1,
-
- {[],Pids} =
- lists:foldr(
- fun(_,{L,Ps}) ->
- {L1,L2} = if length(L)>=NPerProc -> lists:split(NPerProc,L);
- true -> {L,[]} % last chunk
- end,
- {P,_Ref} =
- spawn_monitor(fun() ->
- exit(lists:map(Fun,L1))
- end),
- {L2,[P|Ps]}
- end,
- {List,[]},
- lists:seq(1,NProcs)),
- collect(Pids,[]).
-
-collect([],Acc) ->
- lists:append(Acc);
-collect([Pid|Pids],Acc) ->
- receive
- {'DOWN', _Ref, process, Pid, Result} ->
- %% collect(lists:delete(Pid,Pids),[Result|Acc])
- collect(Pids,[Result|Acc])
- end.
-
+ prepare_cover_compile(Ms,Sticky)
+ end;
+prepare_cover_compile([],Sticky) ->
+ Sticky.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% cover_analyse(Dir,#cover{level=Analyse,mods=Modules,stop=Stop) ->
@@ -268,45 +235,40 @@ collect([Pid|Pids],Acc) ->
%% after the test is completed.
cover_analyse(Dir,#cover{level=Analyse,mods=Modules,stop=Stop}) ->
io:fwrite(user, "Cover analysing... ", []),
- DetailsFun =
+ {ATFOk,ATFFail} =
case Analyse of
details ->
case cover:export(filename:join(Dir,"all.coverdata")) of
ok ->
- fun(M) ->
- OutFile = filename:join(Dir,
- atom_to_list(M) ++
- ".COVER.html"),
- case cover:analyse_to_file(M,OutFile,[html]) of
- {ok,_} ->
- {file,OutFile};
- Error ->
- Error
- end
- end;
+ {result,Ok1,Fail1} =
+ cover:analyse_to_file(Modules,[{outdir,Dir},html]),
+ {lists:map(fun(OutFile) ->
+ M = list_to_atom(
+ filename:basename(
+ filename:rootname(OutFile,
+ ".COVER.html")
+ )
+ ),
+ {M,{file,OutFile}}
+ end, Ok1),
+ lists:map(fun({Reason,M}) ->
+ {M,{error,Reason}}
+ end, Fail1)};
Error ->
- fun(_) -> Error end
+ {[],lists:map(fun(M) -> {M,Error} end, Modules)}
end;
overview ->
case cover:export(filename:join(Dir,"all.coverdata")) of
ok ->
- fun(_) -> undefined end;
+ {[],lists:map(fun(M) -> {M,undefined} end, Modules)};
Error ->
- fun(_) -> Error end
+ {[],lists:map(fun(M) -> {M,Error} end, Modules)}
end
end,
- R = pmap2(
- fun(M) ->
- case cover:analyse(M,module) of
- {ok,{M,{Cov,NotCov}}} ->
- {M,{Cov,NotCov,DetailsFun(M)}};
- Err ->
- io:fwrite(user,
- "\nWARNING: Analysis failed for ~w. Reason: ~p\n",
- [M,Err]),
- {M,Err}
- end
- end, Modules),
+ {result,AOk,AFail} = cover:analyse(Modules,module),
+ R = merge_analysis_results(AOk,ATFOk++ATFFail,[]) ++
+ [{M,{error,Reason}} || {Reason,M} <- AFail],
+
io:fwrite(user, "done\n\n", []),
case Stop of
@@ -319,19 +281,15 @@ cover_analyse(Dir,#cover{level=Analyse,mods=Modules,stop=Stop}) ->
end,
R.
-pmap2(Fun,List) ->
- Collector = self(),
- Pids = lists:map(fun(E) ->
- spawn(fun() ->
- Collector ! {res,self(),Fun(E)}
- end)
- end, List),
- lists:map(fun(Pid) ->
- receive
- {res,Pid,Res} ->
- Res
- end
- end, Pids).
+merge_analysis_results([{M,{Cov,NotCov}}|T],ATF,Acc) ->
+ case lists:keytake(M,1,ATF) of
+ {value,{_,R},ATF1} ->
+ merge_analysis_results(T,ATF1,[{M,{Cov,NotCov,R}}|Acc]);
+ false ->
+ merge_analysis_results(T,ATF,Acc)
+ end;
+merge_analysis_results([],_,Acc) ->
+ Acc.
do_cover_for_node(Node,CoverFunc) ->
do_cover_for_node(Node,CoverFunc,true).
diff --git a/lib/test_server/src/test_server_ctrl.erl b/lib/test_server/src/test_server_ctrl.erl
index af8921fe75..488f38d05d 100644
--- a/lib/test_server/src/test_server_ctrl.erl
+++ b/lib/test_server/src/test_server_ctrl.erl
@@ -1927,15 +1927,20 @@ html_possibly_convert(Src, SrcInfo, Dest) ->
{ok,DestInfo} when DestInfo#file_info.mtime >= SrcInfo#file_info.mtime ->
ok; % dest file up to date
_ ->
+ InclPath = case application:get_env(test_server, include) of
+ {ok,Incls} -> Incls;
+ _ -> []
+ end,
+
OutDir = get(test_server_log_dir_base),
case test_server_sup:framework_call(get_html_wrapper,
["Module "++Src,false,
OutDir,undefined,
encoding(Src)], "") of
Empty when (Empty == "") ; (element(2,Empty) == "") ->
- erl2html2:convert(Src, Dest);
+ erl2html2:convert(Src, Dest, InclPath);
{_,Header,_} ->
- erl2html2:convert(Src, Dest, Header)
+ erl2html2:convert(Src, Dest, InclPath, Header)
end
end.
diff --git a/lib/test_server/test/erl2html2_SUITE.erl b/lib/test_server/test/erl2html2_SUITE.erl
index 37c2b74d8e..908985c879 100644
--- a/lib/test_server/test/erl2html2_SUITE.erl
+++ b/lib/test_server/test/erl2html2_SUITE.erl
@@ -161,7 +161,7 @@ convert_module(Mod,Config) ->
Src = filename:join(DataDir,Mod++".erl"),
Dst = filename:join(PrivDir,Mod++".erl.html"),
io:format("<a href=\"~s\">~s</a>\n",[Src,filename:basename(Src)]),
- ok = erl2html2:convert(Src, Dst, "<html><body>"),
+ ok = erl2html2:convert(Src, Dst, [], "<html><body>"),
io:format("<a href=\"~s\">~s</a>\n",[Dst,filename:basename(Dst)]),
{Src,Dst}.
diff --git a/lib/tools/doc/src/cover.xml b/lib/tools/doc/src/cover.xml
index 07ffa65e3d..914baa7977 100644
--- a/lib/tools/doc/src/cover.xml
+++ b/lib/tools/doc/src/cover.xml
@@ -5,7 +5,7 @@
<header>
<copyright>
<year>2001</year>
- <year>2013</year>
+ <year>2015</year>
<holder>Ericsson AB, All Rights Reserved</holder>
</copyright>
<legalnotice>
@@ -138,17 +138,18 @@
</desc>
</func>
<func>
- <name>compile(ModFile) -> Result</name>
- <name>compile(ModFile, Options) -> Result</name>
- <name>compile_module(ModFile) -> Result</name>
- <name>compile_module(ModFile, Options) -> Result</name>
- <fsummary>Compile a module for Cover analysis.</fsummary>
+ <name>compile(ModFiles) -> Result | [Result]</name>
+ <name>compile(ModFiles, Options) -> Result | [Result]</name>
+ <name>compile_module(ModFiles) -> Result | [Result]</name>
+ <name>compile_module(ModFiles, Options) -> Result | [Result]</name>
+ <fsummary>Compile one or more modules for Cover analysis.</fsummary>
<type>
+ <v>ModFiles = ModFile | [ModFile]</v>
<v>ModFile = Module | File</v>
<v>&nbsp;Module = atom()</v>
<v>&nbsp;File = string()</v>
<v>Options = [Option]</v>
- <v>&nbsp;Option = {i,Dir} | {d,Macro} | {d,Macro,Value}</v>
+ <v>&nbsp;Option = {i,Dir} | {d,Macro} | {d,Macro,Value} | export_all</v>
<d>See <c>compile:file/2.</c></d>
<v>Result = {ok,Module} | {error,File} | {error,not_main_node}</v>
</type>
@@ -165,6 +166,9 @@
returns <c>{ok,Module}</c>. Otherwise the function returns
<c>{error,File}</c>. Errors and warnings are printed as they
occur.</p>
+ <p>If a list of <c>ModFiles</c> is given as input, a list
+ of <c>Result</c> will be returned. The order of the returned
+ list is undefined.</p>
<p>Note that the internal database is (re-)initiated during
the compilation, meaning any previously collected coverage data
for the module will be lost.</p>
@@ -194,9 +198,10 @@
</desc>
</func>
<func>
- <name>compile_beam(ModFile) -> Result</name>
- <fsummary>Compile a module for Cover analysis, using an existing beam.</fsummary>
+ <name>compile_beam(ModFiles) -> Result | [Result]</name>
+ <fsummary>Compile one or more modules for Cover analysis, using existing beam(s).</fsummary>
<type>
+ <v>ModFiles = ModFile | [ModFile]</v>
<v>ModFile = Module | BeamFile</v>
<v>&nbsp;Module = atom()</v>
<v>&nbsp;BeamFile = string()</v>
@@ -229,6 +234,9 @@
returned.</p>
<p><c>{error,BeamFile}</c> is returned if the compiled code
can not be loaded on the node.</p>
+ <p>If a list of <c>ModFiles</c> is given as input, a list
+ of <c>Result</c> will be returned. The order of the returned
+ list is undefined.</p>
</desc>
</func>
<func>
@@ -251,16 +259,21 @@
</desc>
</func>
<func>
- <name>analyse(Module) -> {ok,Answer} | {error,Error}</name>
- <name>analyse(Module, Analysis) -> {ok,Answer} | {error,Error}</name>
- <name>analyse(Module, Level) -> {ok,Answer} | {error,Error}</name>
- <name>analyse(Module, Analysis, Level) -> {ok,Answer} | {error,Error}</name>
- <fsummary>Analyse a Cover compiled module.</fsummary>
+ <name>analyse() -> {result,Ok,Fail} | {error,not_main_node}</name>
+ <name>analyse(Modules) -> OneResult | {result,Ok,Fail} | {error,not_main_node}</name>
+ <name>analyse(Analysis) -> {result,Ok,Fail} | {error,not_main_node}</name>
+ <name>analyse(Level) -> {result,Ok,Fail} | {error,not_main_node}</name>
+ <name>analyse(Modules, Analysis) -> OneResult | {result,Ok,Fail} | {error,not_main_node}</name>
+ <name>analyse(Modules, Level) -> OneResult | {result,Ok,Fail} | {error,not_main_node}</name>
+ <name>analyse(Analysis, Level) -> {result,Ok,Fail} | {error,not_main_node}</name>
+ <name>analyse(Modules, Analysis, Level) -> OneResult | {result,Ok,Fail} | {error,not_main_node}</name>
+ <fsummary>Analyse one or more Cover compiled modules.</fsummary>
<type>
- <v>Module = atom()</v>
+ <v>Modules = Module | [Module]</v>
+ <v>Module = atom() </v>
<v>Analysis = coverage | calls</v>
<v>Level = line | clause | function | module</v>
- <v>Answer = {Module,Value} | [{Item,Value}]</v>
+ <v>OneResult = {ok,{Module,Value}} | {ok,[{Item,Value}]} | {error, Error}</v>
<v>&nbsp;Item = Line | Clause | Function</v>
<v>&nbsp;&nbsp;Line = {M,N}</v>
<v>&nbsp;&nbsp;Clause = {M,F,A,C}</v>
@@ -269,49 +282,67 @@
<v>&nbsp;&nbsp;&nbsp;N = A = C = integer()</v>
<v>&nbsp;Value = {Cov,NotCov} | Calls</v>
<v>&nbsp;&nbsp;Cov = NotCov = Calls = integer()</v>
- <v>Error = {not_cover_compiled,Module} | not_main_node</v>
+ <v>&nbsp;Error = {not_cover_compiled,Module}</v>
+ <v>Ok = [{Module,Value}] | [{Item,Value}]</v>
+ <v>Fail = [Error]</v>
</type>
<desc>
- <p>Performs analysis of a Cover compiled module <c>Module</c>, as
+ <p>Performs analysis of one or more Cover compiled modules, as
specified by <c>Analysis</c> and <c>Level</c> (see above), by
examining the contents of the internal database.</p>
<p><c>Analysis</c> defaults to <c>coverage</c> and <c>Level</c>
defaults to <c>function</c>.</p>
- <p>If <c>Module</c> is not Cover compiled, the function returns
- <c>{error,{not_cover_compiled,Module}}</c>.</p>
- <p>HINT: It is possible to issue multiple analyse_to_file commands at
- the same time. </p>
+ <p>If <c>Modules</c> is an atom (one module), the return will
+ be <c>OneResult</c>, else the return will be
+ <c>{result,Ok,Fail}</c>.</p>
+ <p>If <c>Modules</c> is not given, all modules that have data
+          in the cover data table are analysed. Note that this
+ includes both cover compiled modules and imported
+ modules.</p>
+ <p>If a given module is not Cover compiled, this is indicated
+ by the error reason <c>{not_cover_compiled,Module}</c>.</p>
</desc>
</func>
<func>
- <name>analyse_to_file(Module) -> </name>
- <name>analyse_to_file(Module,Options) -> </name>
- <name>analyse_to_file(Module, OutFile) -> </name>
- <name>analyse_to_file(Module, OutFile, Options) -> {ok,OutFile} | {error,Error}</name>
- <fsummary>Detailed coverage analysis of a Cover compiled module.</fsummary>
+ <name>analyse_to_file() -> {result,Ok,Fail} | {error,not_main_node}</name>
+ <name>analyse_to_file(Modules) -> Answer | {result,Ok,Fail} | {error,not_main_node}</name>
+ <name>analyse_to_file(Options) -> {result,Ok,Fail} | {error,not_main_node}</name>
+ <name>analyse_to_file(Modules,Options) -> Answer | {result,Ok,Fail} | {error,not_main_node}</name>
+ <fsummary>Detailed coverage analysis of one or more Cover compiled modules.</fsummary>
<type>
+ <v>Modules = Module | [Module]</v>
<v>Module = atom()</v>
- <v>OutFile = string()</v>
+ <v>OutFile = OutDir = string()</v>
<v>Options = [Option]</v>
- <v>Option = html</v>
- <v>Error = {not_cover_compiled,Module} | {file,File,Reason} | no_source_code_found | not_main_node</v>
+ <v>Option = html | {outfile,OutFile} | {outdir,OutDir}</v>
+ <v>Answer = {ok,OutFile} | {error,Error}</v>
+ <v>Ok = [OutFile]</v>
+ <v>Fail = [Error]</v>
+ <v>Error = {not_cover_compiled,Module} | {file,File,Reason} | {no_source_code_found,Module}</v>
<v>&nbsp;File = string()</v>
<v>&nbsp;Reason = term()</v>
</type>
<desc>
- <p>Makes a copy <c>OutFile</c> of the source file for a module
- <c>Module</c>, where it for each executable line is specified
+          <p>Makes copies of the source file for the given modules,
+          showing for each executable line
how many times it has been executed.</p>
<p>The output file <c>OutFile</c> defaults to
<c>Module.COVER.out</c>, or <c>Module.COVER.html</c> if the
option <c>html</c> was used.</p>
- <p>If <c>Module</c> is not Cover compiled, the function returns
- <c>{error,{not_cover_compiled,Module}}</c>.</p>
+ <p>If <c>Modules</c> is an atom (one module), the return will
+          be <c>Answer</c>, else the return will be
+          <c>{result,Ok,Fail}</c>.</p>
+ <p>If <c>Modules</c> is not given, all modules that have data
+          in the cover data table are analysed. Note that this
+ includes both cover compiled modules and imported
+ modules.</p>
+ <p>If a module is not Cover compiled, this is indicated by the
+ error reason <c>{not_cover_compiled,Module}</c>.</p>
<p>If the source file and/or the output file cannot be opened using
<c>file:open/2</c>, the function returns
<c>{error,{file,File,Reason}}</c> where <c>File</c> is the file
name and <c>Reason</c> is the error reason.</p>
- <p>If the module was cover compiled from the <c>.beam</c>
+ <p>If a module was cover compiled from the <c>.beam</c>
file, i.e. using <c>compile_beam/1</c> or
<c>compile_beam_directory/0,1</c>, it is assumed that the
source code can be found in the same directory as the
@@ -322,10 +353,8 @@
joining <c>../src</c> and the tail of the compiled path
below a trailing <c>src</c> component, then the compiled
path itself.
- If no source code is found,
- <c>{error,no_source_code_found}</c> is returned.</p>
- <p>HINT: It is possible to issue multiple analyse_to_file commands at
- the same time. </p>
+ If no source code is found, this is indicated by the error reason
+ <c>{no_source_code_found,Module}</c>.</p>
</desc>
</func>
<func>
@@ -339,7 +368,7 @@
<v>OutFile = string()</v>
<v>Options = [Option]</v>
<v>Option = html</v>
- <v>Error = {not_cover_compiled,Module} | {file,File,Reason} | no_source_code_found | not_main_node</v>
+ <v>Error = {not_cover_compiled,Module} | {file,File,Reason} | {no_source_code_found,Module} | not_main_node</v>
<v>&nbsp;File = string()</v>
<v>&nbsp;Reason = term()</v>
</type>
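
The documentation changes above describe the list-based cover API: the compile
and analyse functions accept either one module or a list, and the list forms
return {result,Ok,Fail} instead of a single tagged tuple. A hedged sketch of a
typical sequence, assuming the named modules are already built (module and
directory names are made up):

    %% Cover-compile two modules from their .beam files, exercise them,
    %% then analyse both and write HTML reports into OutDir.
    cover_report(OutDir) ->
        Mods = [my_mod1, my_mod2],
        _ = cover:compile_beam(Mods),            % [{ok,M} | {error,Reason}]
        %% ... run the code under test here ...
        {result, Ok, Fail} = cover:analyse(Mods, coverage, module),
        {result, Files, Errors} =
            cover:analyse_to_file(Mods, [html, {outdir, OutDir}]),
        {Ok, Fail, Files, Errors}.
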
diff --git a/lib/tools/src/cover.erl b/lib/tools/src/cover.erl
index 31754015f7..9a76084bc0 100644
--- a/lib/tools/src/cover.erl
+++ b/lib/tools/src/cover.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2001-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2001-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -77,8 +77,11 @@
compile/1, compile/2, compile_module/1, compile_module/2,
compile_directory/0, compile_directory/1, compile_directory/2,
compile_beam/1, compile_beam_directory/0, compile_beam_directory/1,
- analyse/1, analyse/2, analyse/3, analyze/1, analyze/2, analyze/3,
+ analyse/0, analyse/1, analyse/2, analyse/3,
+ analyze/0, analyze/1, analyze/2, analyze/3,
+ analyse_to_file/0,
analyse_to_file/1, analyse_to_file/2, analyse_to_file/3,
+ analyze_to_file/0,
analyze_to_file/1, analyze_to_file/2, analyze_to_file/3,
async_analyse_to_file/1,async_analyse_to_file/2,
async_analyse_to_file/3, async_analyze_to_file/1,
@@ -109,6 +112,7 @@
line = '_' % integer()
}).
-define(BUMP_REC_NAME,bump).
+-define(CHUNK_SIZE, 20000).
-record(vars, {module, % atom() Module name
@@ -181,10 +185,11 @@ start(Node) when is_atom(Node) ->
start(Nodes) ->
call({start_nodes,remove_myself(Nodes,[])}).
-%% compile(ModFile) ->
-%% compile(ModFile, Options) ->
-%% compile_module(ModFile) -> Result
-%% compile_module(ModFile, Options) -> Result
+%% compile(ModFiles) ->
+%% compile(ModFiles, Options) ->
+%% compile_module(ModFiles) -> Result
+%% compile_module(ModFiles, Options) -> Result
+%% ModFiles = ModFile | [ModFile]
%% ModFile = Module | File
%% Module = atom()
%% File = string()
@@ -198,18 +203,27 @@ compile(ModFile, Options) ->
compile_module(ModFile) when is_atom(ModFile);
is_list(ModFile) ->
compile_module(ModFile, []).
-compile_module(Module, Options) when is_atom(Module), is_list(Options) ->
- compile_module(atom_to_list(Module), Options);
-compile_module(File, Options) when is_list(File), is_list(Options) ->
- WithExt = case filename:extension(File) of
- ".erl" ->
- File;
- _ ->
- File++".erl"
- end,
- AbsFile = filename:absname(WithExt),
- [R] = compile_modules([AbsFile], Options),
- R.
+compile_module(ModFile, Options) when is_atom(ModFile);
+ is_list(ModFile), is_integer(hd(ModFile)) ->
+ [R] = compile_module([ModFile], Options),
+ R;
+compile_module(ModFiles, Options) when is_list(Options) ->
+ AbsFiles =
+ [begin
+ File =
+ case ModFile of
+ _ when is_atom(ModFile) -> atom_to_list(ModFile);
+ _ when is_list(ModFile) -> ModFile
+ end,
+ WithExt = case filename:extension(File) of
+ ".erl" ->
+ File;
+ _ ->
+ File++".erl"
+ end,
+ filename:absname(WithExt)
+ end || ModFile <- ModFiles],
+ compile_modules(AbsFiles, Options).
%% compile_directory() ->
%% compile_directory(Dir) ->
@@ -240,13 +254,14 @@ compile_directory(Dir, Options) when is_list(Dir), is_list(Options) ->
compile_modules(Files,Options) ->
Options2 = filter_options(Options),
- compile_modules(Files,Options2,[]).
+ %% compile_modules(Files,Options2,[]).
+ call({compile, Files, Options2}).
-compile_modules([File|Files], Options, Result) ->
- R = call({compile, File, Options}),
- compile_modules(Files,Options,[R|Result]);
-compile_modules([],_Opts,Result) ->
- lists:reverse(Result).
+%% compile_modules([File|Files], Options, Result) ->
+%% R = call({compile, File, Options}),
+%% compile_modules(Files,Options,[R|Result]);
+%% compile_modules([],_Opts,Result) ->
+%% lists:reverse(Result).
filter_options(Options) ->
lists:filter(fun(Option) ->
@@ -264,30 +279,17 @@ filter_options(Options) ->
%% ModFile - see compile/1
%% Result - see compile/1
%% Reason = non_existing | already_cover_compiled
-compile_beam(Module) when is_atom(Module) ->
- case code:which(Module) of
- non_existing ->
+compile_beam(ModFile0) when is_atom(ModFile0);
+ is_list(ModFile0), is_integer(hd(ModFile0)) ->
+ case compile_beams([ModFile0]) of
+ [{error,{non_existing,_}}] ->
+ %% Backwards compatibility
{error,non_existing};
- ?TAG ->
- compile_beam(Module,?TAG);
- File ->
- compile_beam(Module,File)
+ [Result] ->
+ Result
end;
-compile_beam(File) when is_list(File) ->
- {WithExt,WithoutExt}
- = case filename:rootname(File,".beam") of
- File ->
- {File++".beam",File};
- Rootname ->
- {File,Rootname}
- end,
- AbsFile = filename:absname(WithExt),
- Module = list_to_atom(filename:basename(WithoutExt)),
- compile_beam(Module,AbsFile).
-
-compile_beam(Module,File) ->
- call({compile_beam,Module,File}).
-
+compile_beam(ModFiles) when is_list(ModFiles) ->
+ compile_beams(ModFiles).
%% compile_beam_directory(Dir) -> [Result] | {error,Reason}
@@ -312,19 +314,52 @@ compile_beam_directory(Dir) when is_list(Dir) ->
Error
end.
-compile_beams(Files) ->
- compile_beams(Files,[]).
-compile_beams([File|Files],Result) ->
- R = compile_beam(File),
- compile_beams(Files,[R|Result]);
-compile_beams([],Result) ->
- lists:reverse(Result).
-
+compile_beams(ModFiles0) ->
+ ModFiles = get_mods_and_beams(ModFiles0,[]),
+ call({compile_beams,ModFiles}).
-%% analyse(Module) ->
-%% analyse(Module, Analysis) ->
-%% analyse(Module, Level) ->
-%% analyse(Module, Analysis, Level) -> {ok,Answer} | {error,Error}
+get_mods_and_beams([Module|ModFiles],Acc) when is_atom(Module) ->
+ case code:which(Module) of
+ non_existing ->
+ get_mods_and_beams(ModFiles,[{error,{non_existing,Module}}|Acc]);
+ File ->
+ get_mods_and_beams([{Module,File}|ModFiles],Acc)
+ end;
+get_mods_and_beams([File|ModFiles],Acc) when is_list(File) ->
+ {WithExt,WithoutExt}
+ = case filename:rootname(File,".beam") of
+ File ->
+ {File++".beam",File};
+ Rootname ->
+ {File,Rootname}
+ end,
+ AbsFile = filename:absname(WithExt),
+ Module = list_to_atom(filename:basename(WithoutExt)),
+ get_mods_and_beams([{Module,AbsFile}|ModFiles],Acc);
+get_mods_and_beams([{Module,File}|ModFiles],Acc) ->
+ %% Check for duplicates
+ case lists:keyfind(Module,2,Acc) of
+ {ok,Module,File} ->
+ %% Duplicate, but same file so ignore
+ get_mods_and_beams(ModFiles,Acc);
+ {ok,Module,_OtherFile} ->
+ %% Duplicate and differnet file - error
+ get_mods_and_beams(ModFiles,[{error,{duplicate,Module}}|Acc]);
+ _ ->
+ get_mods_and_beams(ModFiles,[{ok,Module,File}|Acc])
+ end;
+get_mods_and_beams([],Acc) ->
+ lists:reverse(Acc).
+
+
+%% analyse(Modules) ->
+%% analyse(Analysis) ->
+%% analyse(Level) ->
+%% analyse(Modules, Analysis) ->
+%% analyse(Modules, Level) ->
+%% analyse(Analysis, Level)
+%% analyse(Modules, Analysis, Level) -> {ok,Answer} | {error,Error}
+%% Modules = Module | [Module]
%% Module = atom()
%% Analysis = coverage | calls
%% Level = line | clause | function | module
@@ -337,48 +372,74 @@ compile_beams([],Result) ->
%% N = A = C = integer()
%% Value = {Cov,NotCov} | Calls
%% Cov = NotCov = Calls = integer()
-%% Error = {not_cover_compiled,Module}
+%% Error = {not_cover_compiled,Module} | not_main_node
+-define(is_analysis(__A__),
+ (__A__=:=coverage orelse __A__=:=calls)).
+-define(is_level(__L__),
+ (__L__=:=line orelse __L__=:=clause orelse
+ __L__=:=function orelse __L__=:=module)).
+analyse() ->
+ analyse('_').
+
+analyse(Analysis) when ?is_analysis(Analysis) ->
+ analyse('_', Analysis);
+analyse(Level) when ?is_level(Level) ->
+ analyse('_', Level);
analyse(Module) ->
analyse(Module, coverage).
-analyse(Module, Analysis) when Analysis=:=coverage; Analysis=:=calls ->
+
+analyse(Analysis, Level) when ?is_analysis(Analysis) andalso
+ ?is_level(Level) ->
+ analyse('_', Analysis, Level);
+analyse(Module, Analysis) when ?is_analysis(Analysis) ->
analyse(Module, Analysis, function);
-analyse(Module, Level) when Level=:=line; Level=:=clause; Level=:=function;
- Level=:=module ->
+analyse(Module, Level) when ?is_level(Level) ->
analyse(Module, coverage, Level).
-analyse(Module, Analysis, Level) when is_atom(Module),
- Analysis=:=coverage; Analysis=:=calls,
- Level=:=line; Level=:=clause;
- Level=:=function; Level=:=module ->
+
+analyse(Module, Analysis, Level) when ?is_analysis(Analysis),
+ ?is_level(Level) ->
call({{analyse, Analysis, Level}, Module}).
+analyze() -> analyse( ).
analyze(Module) -> analyse(Module).
analyze(Module, Analysis) -> analyse(Module, Analysis).
analyze(Module, Analysis, Level) -> analyse(Module, Analysis, Level).
-%% analyse_to_file(Module) ->
-%% analyse_to_file(Module, Options) ->
-%% analyse_to_file(Module, OutFile) ->
-%% analyse_to_file(Module, OutFile, Options) -> {ok,OutFile} | {error,Error}
+%% analyse_to_file() ->
+%% analyse_to_file(Modules) ->
+%% analyse_to_file(Modules, Options) ->
+%% Modules = Module | [Module]
%% Module = atom()
%% OutFile = string()
%% Options = [Option]
-%% Option = html
+%% Option = html | {outfile,filename()} | {outdir,dirname()}
%% Error = {not_cover_compiled,Module} | no_source_code_found |
%% {file,File,Reason}
%% File = string()
%% Reason = term()
-analyse_to_file(Module) when is_atom(Module) ->
- analyse_to_file(Module, outfilename(Module,[]), []).
-analyse_to_file(Module, []) when is_atom(Module) ->
- analyse_to_file(Module, outfilename(Module,[]), []);
-analyse_to_file(Module, Options) when is_atom(Module),
- is_list(Options), is_atom(hd(Options)) ->
- analyse_to_file(Module, outfilename(Module,Options), Options);
-analyse_to_file(Module, OutFile) when is_atom(Module), is_list(OutFile) ->
- analyse_to_file(Module, OutFile, []).
-analyse_to_file(Module, OutFile, Options) when is_atom(Module), is_list(OutFile) ->
- call({{analyse_to_file, OutFile, Options}, Module}).
-
+%%
+%% Kept for backwards compatibility:
+%% analyse_to_file(Modules, OutFile) ->
+%% analyse_to_file(Modules, OutFile, Options) -> {ok,OutFile} | {error,Error}
+analyse_to_file() ->
+ analyse_to_file('_').
+analyse_to_file(Arg) ->
+ case is_options(Arg) of
+ true ->
+ analyse_to_file('_',Arg);
+ false ->
+ analyse_to_file(Arg,[])
+ end.
+analyse_to_file(Module, OutFile) when is_list(OutFile), is_integer(hd(OutFile)) ->
+ %% Kept for backwards compatibility
+ analyse_to_file(Module, [{outfile,OutFile}]);
+analyse_to_file(Module, Options) when is_list(Options) ->
+ call({{analyse_to_file, Options}, Module}).
+analyse_to_file(Module, OutFile, Options) when is_list(OutFile) ->
+ %% Kept for backwards compatibility
+ analyse_to_file(Module,[{outfile,OutFile}|Options]).
+
+analyze_to_file() -> analyse_to_file().
analyze_to_file(Module) -> analyse_to_file(Module).
analyze_to_file(Module, OptOrOut) -> analyse_to_file(Module, OptOrOut).
analyze_to_file(Module, OutFile, Options) ->
@@ -391,6 +452,15 @@ async_analyse_to_file(Module, OutFileOrOpts) ->
async_analyse_to_file(Module, OutFile, Options) ->
do_spawn(?MODULE, analyse_to_file, [Module, OutFile, Options]).
+is_options([html]) ->
+ true; % this is not 100% safe - could be a module named html...
+is_options([html|Opts]) ->
+ is_options(Opts);
+is_options([{Opt,_}|_]) when Opt==outfile; Opt==outdir ->
+ true;
+is_options(_) ->
+ false.
+
do_spawn(M,F,A) ->
spawn_link(fun() ->
case apply(M,F,A) of
@@ -408,13 +478,16 @@ async_analyze_to_file(Module, OutFileOrOpts) ->
async_analyze_to_file(Module, OutFile, Options) ->
async_analyse_to_file(Module, OutFile, Options).
-outfilename(Module,Opts) ->
- case lists:member(html,Opts) of
- true ->
- atom_to_list(Module)++".COVER.html";
- false ->
- atom_to_list(Module)++".COVER.out"
- end.
+outfilename(undefined, Module, HTML) ->
+ outfilename(Module, HTML);
+outfilename(OutDir, Module, HTML) ->
+ filename:join(OutDir, outfilename(Module, HTML)).
+
+outfilename(Module, true) ->
+ atom_to_list(Module)++".COVER.html";
+outfilename(Module, false) ->
+ atom_to_list(Module)++".COVER.out".
+
%% export(File)
%% export(File,Module) -> ok | {error,Reason}
@@ -559,7 +632,7 @@ init_main(Starter) ->
,{write_concurrency, true}
]),
ets:new(?COVER_CLAUSE_TABLE, [set, public, named_table]),
- ets:new(?BINARY_TABLE, [set, named_table]),
+ ets:new(?BINARY_TABLE, [set, public, named_table]),
ets:new(?COLLECTION_TABLE, [set, public, named_table]),
ets:new(?COLLECTION_CLAUSE_TABLE, [set, public, named_table]),
net_kernel:monitor_nodes(true),
@@ -573,55 +646,19 @@ main_process_loop(State) ->
reply(From, {ok,StartedNodes}),
main_process_loop(State1);
- {From, {compile, File, Options}} ->
- case do_compile(File, Options) of
- {ok, Module} ->
- remote_load_compiled(State#main_state.nodes,[{Module,File}]),
- reply(From, {ok, Module}),
- Compiled = add_compiled(Module, File,
- State#main_state.compiled),
- Imported = remove_imported(Module,State#main_state.imported),
- NewState = State#main_state{compiled = Compiled,
- imported = Imported},
- %% This module (cover) could have been reloaded. Make
- %% sure we run the new code.
- ?MODULE:main_process_loop(NewState);
- error ->
- reply(From, {error, File}),
- main_process_loop(State)
- end;
+ {From, {compile, Files, Options}} ->
+ {R,S} = do_compile(Files, Options, State),
+ reply(From,R),
+ %% This module (cover) could have been reloaded. Make
+ %% sure we run the new code.
+ ?MODULE:main_process_loop(S);
- {From, {compile_beam, Module, BeamFile0}} ->
- Compiled0 = State#main_state.compiled,
- case get_beam_file(Module,BeamFile0,Compiled0) of
- {ok,BeamFile} ->
- UserOptions = get_compile_options(Module,BeamFile),
- {Reply,Compiled} =
- case do_compile_beam(Module,BeamFile,UserOptions) of
- {ok, Module} ->
- remote_load_compiled(State#main_state.nodes,
- [{Module,BeamFile}]),
- C = add_compiled(Module,BeamFile,Compiled0),
- {{ok,Module},C};
- error ->
- {{error, BeamFile}, Compiled0};
- {error,Reason} -> % no abstract code
- {{error, {Reason, BeamFile}}, Compiled0}
- end,
- reply(From,Reply),
- Imported = remove_imported(Module,State#main_state.imported),
- NewState = State#main_state{compiled = Compiled,
- imported = Imported},
- %% This module (cover) could have been reloaded. Make
- %% sure we run the new code.
- ?MODULE:main_process_loop(NewState);
- {error,no_beam} ->
- %% The module has first been compiled from .erl, and now
- %% someone tries to compile it from .beam
- reply(From,
- {error,{already_cover_compiled,no_beam_found,Module}}),
- main_process_loop(State)
- end;
+ {From, {compile_beams, ModsAndFiles}} ->
+ {R,S} = do_compile_beams(ModsAndFiles,State),
+ reply(From,R),
+ %% This module (cover) could have been reloaded. Make
+ %% sure we run the new code.
+ ?MODULE:main_process_loop(S);
{From, {export,OutFile,Module}} ->
spawn(fun() ->
@@ -706,6 +743,16 @@ main_process_loop(State) ->
unregister(?SERVER),
reply(From, ok);
+ {From, {{analyse, Analysis, Level}, '_'}} ->
+ R = analyse_all(Analysis, Level, State),
+ reply(From, R),
+ main_process_loop(State);
+
+ {From, {{analyse, Analysis, Level}, Modules}} when is_list(Modules) ->
+ R = analyse_list(Modules, Analysis, Level, State),
+ reply(From, R),
+ main_process_loop(State);
+
{From, {{analyse, Analysis, Level}, Module}} ->
S = try
Loaded = is_loaded(Module, State),
@@ -722,15 +769,23 @@ main_process_loop(State) ->
end,
main_process_loop(S);
- {From, {{analyse_to_file, OutFile, Opts},Module}} ->
+ {From, {{analyse_to_file, Opts},'_'}} ->
+ R = analyse_all_to_file(Opts, State),
+ reply(From,R),
+ main_process_loop(State);
+
+ {From, {{analyse_to_file, Opts},Modules}} when is_list(Modules) ->
+ R = analyse_list_to_file(Modules, Opts, State),
+ reply(From,R),
+ main_process_loop(State);
+
+ {From, {{analyse_to_file, Opts},Module}} ->
S = try
Loaded = is_loaded(Module, State),
spawn(fun() ->
- ?SPAWN_DBG(analyse_to_file,
- {Module,OutFile, Opts}),
+ ?SPAWN_DBG(analyse_to_file,{Module,Opts}),
do_parallel_analysis_to_file(
- Module, OutFile, Opts,
- Loaded, From, State)
+ Module, Opts, Loaded, From, State)
end),
State
catch throw:Reason ->
@@ -848,11 +903,15 @@ remote_process_loop(State) ->
{remote,collect,Module,CollectorPid} ->
self() ! {remote,collect,Module,CollectorPid, ?SERVER};
- {remote,collect,Module,CollectorPid,From} ->
+ {remote,collect,Modules0,CollectorPid,From} ->
+ Modules = case Modules0 of
+ '_' -> [M || {M,_} <- State#remote_state.compiled];
+ _ -> Modules0
+ end,
spawn(fun() ->
?SPAWN_DBG(remote_collect,
- {Module, CollectorPid, From}),
- do_collect(Module, CollectorPid, From)
+ {Modules, CollectorPid, From}),
+ do_collect(Modules, CollectorPid, From)
end),
remote_process_loop(State);
@@ -893,39 +952,51 @@ remote_process_loop(State) ->
end.
-do_collect(Module, CollectorPid, From) ->
- AllMods =
- case Module of
- '_' -> ets:tab2list(?COVER_CLAUSE_TABLE);
- _ -> ets:lookup(?COVER_CLAUSE_TABLE, Module)
- end,
-
- %% Sending clause by clause in order to avoid large lists
+do_collect(Modules, CollectorPid, From) ->
pmap(
- fun({_Mod,Clauses}) ->
- lists:map(fun(Clause) ->
- send_collected_data(Clause, CollectorPid)
- end,Clauses)
- end,AllMods),
+ fun(Module) ->
+ Pattern = {#bump{module=Module, _='_'}, '$1'},
+ MatchSpec = [{Pattern,[{'=/=','$1',0}],['$_']}],
+ Match = ets:select(?COVER_TABLE,MatchSpec,?CHUNK_SIZE),
+ send_chunks(Match, CollectorPid, [])
+ end,Modules),
CollectorPid ! done,
remote_reply(From, ok).
-send_collected_data({M,F,A,C,_L}, CollectorPid) ->
- Pattern =
- {#bump{module=M, function=F, arity=A, clause=C}, '_'},
- Bumps = ets:match_object(?COVER_TABLE, Pattern),
- %% Reset
- lists:foreach(fun({Bump,_N}) ->
- ets:insert(?COVER_TABLE, {Bump,0})
- end,
- Bumps),
- CollectorPid ! {chunk,Bumps}.
+send_chunks('$end_of_table', _CollectorPid, Mons) ->
+ get_downs(Mons);
+send_chunks({Chunk,Continuation}, CollectorPid, Mons) ->
+ Mon = spawn_monitor(
+ fun() ->
+ lists:foreach(fun({Bump,_N}) ->
+ ets:insert(?COVER_TABLE, {Bump,0})
+ end,
+ Chunk) end),
+ send_chunk(CollectorPid,Chunk),
+ send_chunks(ets:select(Continuation), CollectorPid, [Mon|Mons]).
+
+send_chunk(CollectorPid,Chunk) ->
+ CollectorPid ! {chunk,Chunk,self()},
+ receive continue -> ok end.
+
+get_downs([]) ->
+ ok;
+get_downs(Mons) ->
+ receive
+ {'DOWN', Ref, _Type, Pid, _Reason} = Down ->
+ case lists:member({Pid,Ref},Mons) of
+ true ->
+ get_downs(lists:delete({Pid,Ref},Mons));
+ false ->
+ %% This should be handled somewhere else
+ self() ! Down,
+ get_downs(Mons)
+ end
+ end.
-reload_originals([{Module,_File}|Compiled]) ->
- do_reload_original(Module),
- reload_originals(Compiled);
-reload_originals([]) ->
- ok.
+reload_originals(Compiled) ->
+ Modules = [M || {M,_} <- Compiled],
+ pmap(fun do_reload_original/1, Modules).
do_reload_original(Module) ->
case code:which(Module) of
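The chunked traversal above (ets:select/3 with ?CHUNK_SIZE plus a continuation) is the central trick of this commit. A minimal, self-contained sketch of the pattern, assuming a plain {Key, Count} table; the module name, table layout and chunk size are made up for illustration and are not part of the patch:

    -module(chunk_demo).
    -export([sum_counts/1]).

    -define(CHUNK_SIZE, 20000).

    %% Sum all non-zero counters without ever materialising the whole table
    %% as one list: ets:select/3 returns at most ?CHUNK_SIZE matches plus a
    %% continuation for fetching the next chunk.
    sum_counts(Tab) ->
        MatchSpec = [{{'$1','$2'}, [{'=/=','$2',0}], ['$_']}],
        sum_chunks(ets:select(Tab, MatchSpec, ?CHUNK_SIZE), 0).

    sum_chunks('$end_of_table', Acc) ->
        Acc;
    sum_chunks({Chunk, Continuation}, Acc) ->
        Acc1 = lists:foldl(fun({_Key, N}, A) -> A + N end, Acc, Chunk),
        sum_chunks(ets:select(Continuation), Acc1).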
@@ -1068,15 +1139,40 @@ remote_load_compiled(_Nodes, [], [], _ModNum) ->
ok;
remote_load_compiled(Nodes, Compiled, Acc, ModNum)
when Compiled == []; ModNum == ?MAX_MODS ->
+ RemoteLoadData = get_downs_r(Acc),
lists:foreach(
fun(Node) ->
- remote_call(Node,{remote,load_compiled,Acc})
+ remote_call(Node,{remote,load_compiled,RemoteLoadData})
end,
Nodes),
remote_load_compiled(Nodes, Compiled, [], 0);
remote_load_compiled(Nodes, [MF | Rest], Acc, ModNum) ->
remote_load_compiled(
- Nodes, Rest, [get_data_for_remote_loading(MF) | Acc], ModNum + 1).
+ Nodes, Rest,
+ [spawn_job_r(fun() -> get_data_for_remote_loading(MF) end) | Acc],
+ ModNum + 1).
+
+spawn_job_r(Fun) ->
+ spawn_monitor(fun() -> exit(Fun()) end).
+
+get_downs_r([]) ->
+ [];
+get_downs_r(Mons) ->
+ receive
+ {'DOWN', Ref, _Type, Pid, R={_,_,_,_}} ->
+ [R|get_downs_r(lists:delete({Pid,Ref},Mons))];
+ {'DOWN', Ref, _Type, Pid, Reason} = Down ->
+ case lists:member({Pid,Ref},Mons) of
+ true ->
+ %% Something went really wrong - don't hang!
+ exit(Reason);
+ false ->
+ %% This should be handled somewhere else
+ self() ! Down,
+ get_downs_r(Mons)
+ end
+ end.
+
%% Read all data needed for loading a cover compiled module on a remote node
%% Binary is the beam code for the module and InitialTable is the initial
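spawn_job_r/get_downs_r above rely on the spawn_monitor + exit(Result) idiom: the worker's return value travels inside the 'DOWN' message, so no explicit reply protocol is needed. A minimal standalone sketch of the idiom (module and function names are illustrative, not part of the patch); results arrive in completion order:

    -module(job_demo).
    -export([run/1]).

    %% Run each fun in its own monitored process; the fun's return value is
    %% wrapped in exit/1 and comes back inside the 'DOWN' message.
    run(Funs) ->
        Mons = [spawn_monitor(fun() -> exit({done, F()}) end) || F <- Funs],
        collect(Mons, []).

    collect([], Acc) ->
        Acc;
    collect(Mons, Acc) ->
        receive
            {'DOWN', Ref, process, Pid, {done, Result}} ->
                collect(lists:delete({Pid, Ref}, Mons), [Result | Acc]);
            {'DOWN', Ref, process, Pid, Reason} = Down ->
                case lists:member({Pid, Ref}, Mons) of
                    true ->
                        exit(Reason);          %% a worker crashed - don't hang
                    false ->
                        self() ! Down,         %% not ours - leave it for its owner
                        collect(Mons, Acc)
                end
        end.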
@@ -1113,11 +1209,11 @@ remote_reset(Module,Nodes) ->
Nodes).
%% Collect data from remote nodes - used for analyse or stop(Node)
-remote_collect(Module,Nodes,Stop) ->
+remote_collect(Modules,Nodes,Stop) ->
pmap(fun(Node) ->
?SPAWN_DBG(remote_collect,
- {Module, Nodes, Stop}),
- do_collection(Node, Module, Stop)
+ {Modules, Nodes, Stop}),
+ do_collection(Node, Modules, Stop)
end,
Nodes).
@@ -1138,8 +1234,9 @@ do_collection(Node, Module, Stop) ->
collector_proc() ->
?SPAWN_DBG(collector_proc, []),
receive
- {chunk,Chunk} ->
+ {chunk,Chunk,From} ->
insert_in_collection_table(Chunk),
+ From ! continue,
collector_proc();
done ->
ok
@@ -1259,6 +1356,19 @@ add_compiled(Module, File, [H|Compiled]) ->
add_compiled(Module, File, []) ->
[{Module,File}].
+are_loaded([Module|Modules], State, Loaded, Imported, Error) ->
+ try is_loaded(Module,State) of
+ {loaded,File} ->
+ are_loaded(Modules, State, [{Module,File}|Loaded], Imported, Error);
+ {imported,File,_} ->
+ are_loaded(Modules, State, Loaded, [{Module,File}|Imported], Error)
+ catch throw:_ ->
+ are_loaded(Modules, State, Loaded, Imported,
+ [{not_cover_compiled,Module}|Error])
+ end;
+are_loaded([], _State, Loaded, Imported, Error) ->
+ {Loaded, Imported, Error}.
+
is_loaded(Module, State) ->
case get_file(Module, State#main_state.compiled) of
{ok, File} ->
@@ -1333,18 +1443,75 @@ get_compiled_still_loaded(Nodes,Compiled0) ->
%%%--Compilation---------------------------------------------------------
-%% do_compile(File, Options) -> {ok,Module} | {error,Error}
-do_compile(File, UserOptions) ->
+do_compile_beams(ModsAndFiles, State) ->
+ Result0 = pmap(fun({ok,Module,File}) ->
+ do_compile_beam(Module,File,State);
+ (Error) ->
+ Error
+ end,
+ ModsAndFiles),
+ Compiled = [{M,F} || {ok,M,F} <- Result0],
+ remote_load_compiled(State#main_state.nodes,Compiled),
+ fix_state_and_result(Result0,State,[]).
+
+do_compile_beam(Module,BeamFile0,State) ->
+ case get_beam_file(Module,BeamFile0,State#main_state.compiled) of
+ {ok,BeamFile} ->
+ UserOptions = get_compile_options(Module,BeamFile),
+ case do_compile_beam1(Module,BeamFile,UserOptions) of
+ {ok, Module} ->
+ {ok,Module,BeamFile};
+ error ->
+ {error, BeamFile};
+ {error,Reason} -> % no abstract code
+ {error, {Reason, BeamFile}}
+ end;
+ {error,no_beam} ->
+ %% The module has first been compiled from .erl, and now
+ %% someone tries to compile it from .beam
+ {error,{already_cover_compiled,no_beam_found,Module}}
+ end.
+
+fix_state_and_result([{ok,Module,BeamFile}|Rest],State,Acc) ->
+ Compiled = add_compiled(Module,BeamFile,State#main_state.compiled),
+ Imported = remove_imported(Module,State#main_state.imported),
+ NewState = State#main_state{compiled=Compiled,imported=Imported},
+ fix_state_and_result(Rest,NewState,[{ok,Module}|Acc]);
+fix_state_and_result([Error|Rest],State,Acc) ->
+ fix_state_and_result(Rest,State,[Error|Acc]);
+fix_state_and_result([],State,Acc) ->
+ {lists:reverse(Acc),State}.
+
+
+do_compile(Files, Options, State) ->
+ Result0 = pmap(fun(File) ->
+ do_compile(File, Options)
+ end,
+ Files),
+ Compiled = [{M,F} || {ok,M,F} <- Result0],
+ remote_load_compiled(State#main_state.nodes,Compiled),
+ fix_state_and_result(Result0,State,[]).
+
+do_compile(File, Options) ->
+ case do_compile1(File, Options) of
+ {ok, Module} ->
+ {ok,Module,File};
+ error ->
+ {error,File}
+ end.
+
+%% do_compile1(File, Options) -> {ok,Module} | error
+do_compile1(File, UserOptions) ->
Options = [debug_info,binary,report_errors,report_warnings] ++ UserOptions,
case compile:file(File, Options) of
{ok, Module, Binary} ->
- do_compile_beam(Module,Binary,UserOptions);
+ do_compile_beam1(Module,Binary,UserOptions);
error ->
error
end.
%% Beam is a binary or a .beam file name
-do_compile_beam(Module,Beam,UserOptions) ->
+do_compile_beam1(Module,Beam,UserOptions) ->
%% Clear database
do_clear(Module),
@@ -1915,10 +2082,21 @@ common_elems(L1, L2) ->
collect(Nodes) ->
%% local node
AllClauses = ets:tab2list(?COVER_CLAUSE_TABLE),
- pmap(fun move_modules/1,AllClauses),
-
+ Mon1 = spawn_monitor(fun() -> pmap(fun move_modules/1,AllClauses) end),
+
+ %% remote nodes
+ Mon2 = spawn_monitor(fun() -> remote_collect('_',Nodes,false) end),
+ get_downs([Mon1,Mon2]).
+
+%% Collect data for a list of modules
+collect(Modules,Nodes) ->
+ MS = [{{'$1','_'},[{'==','$1',M}],['$_']} || M <- Modules],
+ Clauses = ets:select(?COVER_CLAUSE_TABLE,MS),
+ Mon1 = spawn_monitor(fun() -> pmap(fun move_modules/1,Clauses) end),
+
%% remote nodes
- remote_collect('_',Nodes,false).
+ Mon2 = spawn_monitor(fun() -> remote_collect('_',Nodes,false) end),
+ get_downs([Mon1,Mon2]).
%% Collect data for one module
collect(Module,Clauses,Nodes) ->
@@ -1926,25 +2104,26 @@ collect(Module,Clauses,Nodes) ->
move_modules({Module,Clauses}),
%% remote nodes
- remote_collect(Module,Nodes,false).
+ remote_collect([Module],Nodes,false).
%% When analysing, the data from the local ?COVER_TABLE is moved to the
%% ?COLLECTION_TABLE. Resetting data in ?COVER_TABLE
move_modules({Module,Clauses}) ->
ets:insert(?COLLECTION_CLAUSE_TABLE,{Module,Clauses}),
- move_clauses(Clauses).
+ Pattern = {#bump{module=Module, _='_'}, '_'},
+ MatchSpec = [{Pattern,[],['$_']}],
+ Match = ets:select(?COVER_TABLE,MatchSpec,?CHUNK_SIZE),
+ do_move_module(Match).
-move_clauses([{M,F,A,C,_L}|Clauses]) ->
- Pattern = {#bump{module=M, function=F, arity=A, clause=C}, '_'},
- Bumps = ets:match_object(?COVER_TABLE,Pattern),
+do_move_module({Bumps,Continuation}) ->
lists:foreach(fun({Key,Val}) ->
ets:insert(?COVER_TABLE, {Key,0}),
insert_in_collection_table(Key,Val)
end,
Bumps),
- move_clauses(Clauses);
-move_clauses([]) ->
+ do_move_module(ets:select(Continuation));
+do_move_module('$end_of_table') ->
ok.
%% Given a .beam file, find the .erl file. Look first in same directory as
@@ -2002,6 +2181,26 @@ splice(BeamDir, SrcFile) ->
revsplit(Path) ->
lists:reverse(filename:split(Path)).
+analyse_list(Modules, Analysis, Level, State) ->
+ {LoadedMF, ImportedMF, Error} = are_loaded(Modules, State, [], [], []),
+ Loaded = [M || {M,_} <- LoadedMF],
+ Imported = [M || {M,_} <- ImportedMF],
+ collect(Loaded, State#main_state.nodes),
+ MS = [{{'$1','_'},[{'==','$1',M}],['$_']} || M <- Loaded ++ Imported],
+ AllClauses = ets:select(?COLLECTION_CLAUSE_TABLE,MS),
+ Fun = fun({Module,Clauses}) ->
+ do_analyse(Module, Analysis, Level, Clauses)
+ end,
+ {result, lists:flatten(pmap(Fun, AllClauses)), Error}.
+
+analyse_all(Analysis, Level, State) ->
+ collect(State#main_state.nodes),
+ AllClauses = ets:tab2list(?COLLECTION_CLAUSE_TABLE),
+ Fun = fun({Module,Clauses}) ->
+ do_analyse(Module, Analysis, Level, Clauses)
+ end,
+ {result, lists:flatten(pmap(Fun, AllClauses)), []}.
+
do_parallel_analysis(Module, Analysis, Level, Loaded, From, State) ->
analyse_info(Module,State#main_state.imported),
C = case Loaded of
@@ -2016,7 +2215,7 @@ do_parallel_analysis(Module, Analysis, Level, Loaded, From, State) ->
Clauses
end,
R = do_analyse(Module, Analysis, Level, C),
- reply(From, R).
+ reply(From, {ok,R}).
%% do_analyse(Module, Analysis, Level, Clauses)-> {ok,Answer} | {error,Error}
%% Clauses = [{Module,Function,Arity,Clause,Lines}]
@@ -2035,37 +2234,44 @@ do_analyse(Module, Analysis, line, _Clauses) ->
{{Module,L}, N}
end
end,
- Answer = lists:keysort(1, lists:map(Fun, Bumps)),
- {ok, Answer};
-do_analyse(_Module, Analysis, clause, Clauses) ->
- Fun = case Analysis of
- coverage ->
- fun({M,F,A,C,Ls}) ->
- Pattern = {#bump{module=M,function=F,arity=A,
- clause=C},0},
- Bumps = ets:match_object(?COLLECTION_TABLE, Pattern),
- NotCov = length(Bumps),
- {{M,F,A,C}, {Ls-NotCov, NotCov}}
- end;
- calls ->
- fun({M,F,A,C,_Ls}) ->
- Pattern = {#bump{module=M,function=F,arity=A,
- clause=C},'_'},
- Bumps = ets:match_object(?COLLECTION_TABLE, Pattern),
- {_Bump, Calls} = hd(lists:keysort(1, Bumps)),
- {{M,F,A,C}, Calls}
- end
- end,
- Answer = lists:map(Fun, Clauses),
- {ok, Answer};
+ lists:keysort(1, lists:map(Fun, Bumps));
+do_analyse(Module, Analysis, clause, _Clauses) ->
+ Pattern = {#bump{module=Module},'_'},
+ Bumps = lists:keysort(1,ets:match_object(?COLLECTION_TABLE, Pattern)),
+ analyse_clause(Analysis,Bumps);
do_analyse(Module, Analysis, function, Clauses) ->
- {ok, ClauseResult} = do_analyse(Module, Analysis, clause, Clauses),
- Result = merge_clauses(ClauseResult, merge_fun(Analysis)),
- {ok, Result};
+ ClauseResult = do_analyse(Module, Analysis, clause, Clauses),
+ merge_clauses(ClauseResult, merge_fun(Analysis));
do_analyse(Module, Analysis, module, Clauses) ->
- {ok, FunctionResult} = do_analyse(Module, Analysis, function, Clauses),
+ FunctionResult = do_analyse(Module, Analysis, function, Clauses),
Result = merge_functions(FunctionResult, merge_fun(Analysis)),
- {ok, {Module,Result}}.
+ {Module,Result}.
+
+analyse_clause(_,[]) ->
+ [];
+analyse_clause(coverage,
+ [{#bump{module=M,function=F,arity=A,clause=C},_}|_]=Bumps) ->
+ analyse_clause_cov(Bumps,{M,F,A,C},0,0,[]);
+analyse_clause(calls,Bumps) ->
+ analyse_clause_calls(Bumps,{x,x,x,x},[]).
+
+analyse_clause_cov([{#bump{module=M,function=F,arity=A,clause=C},N}|Bumps],
+ {M,F,A,C}=Clause,Ls,NotCov,Acc) ->
+ analyse_clause_cov(Bumps,Clause,Ls+1,if N==0->NotCov+1; true->NotCov end,Acc);
+analyse_clause_cov([{#bump{module=M1,function=F1,arity=A1,clause=C1},_}|_]=Bumps,
+ Clause,Ls,NotCov,Acc) ->
+ analyse_clause_cov(Bumps,{M1,F1,A1,C1},0,0,[{Clause,{Ls-NotCov,NotCov}}|Acc]);
+analyse_clause_cov([],Clause,Ls,NotCov,Acc) ->
+ lists:reverse(Acc,[{Clause,{Ls-NotCov,NotCov}}]).
+
+analyse_clause_calls([{#bump{module=M,function=F,arity=A,clause=C},_}|Bumps],
+ {M,F,A,C}=Clause,Acc) ->
+ analyse_clause_calls(Bumps,Clause,Acc);
+analyse_clause_calls([{#bump{module=M1,function=F1,arity=A1,clause=C1},N}|Bumps],
+ _Clause,Acc) ->
+ analyse_clause_calls(Bumps,{M1,F1,A1,C1},[{{M1,F1,A1,C1},N}|Acc]);
+analyse_clause_calls([],_Clause,Acc) ->
+ lists:reverse(Acc).
merge_fun(coverage) ->
fun({Cov1,NotCov1}, {Cov2,NotCov2}) ->
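A small worked example of what the single pass in analyse_clause_cov/5 above computes from the keysorted bump list (the bump data is made up: one covered and one uncovered line in clause 1, one covered line in clause 2):

    %% Bumps = [{#bump{module=m,function=f,arity=1,clause=1,line=3}, 2},
    %%          {#bump{module=m,function=f,arity=1,clause=1,line=4}, 0},
    %%          {#bump{module=m,function=f,arity=1,clause=2,line=6}, 1}],
    %% analyse_clause(coverage, Bumps) ->
    %%     [{{m,f,1,1},{1,1}},    %% clause 1: 1 line covered, 1 not covered
    %%      {{m,f,1,2},{1,0}}]    %% clause 2: 1 line covered, 0 not covered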
@@ -2094,7 +2300,50 @@ merge_functions([{_MFA,R}|Functions], MFun, Result) ->
merge_functions([], _MFun, Result) ->
Result.
-do_parallel_analysis_to_file(Module, OutFile, Opts, Loaded, From, State) ->
+analyse_list_to_file(Modules, Opts, State) ->
+ {LoadedMF, ImportedMF, Error} = are_loaded(Modules, State, [], [], []),
+ collect([M || {M,_} <- LoadedMF], State#main_state.nodes),
+ OutDir = proplists:get_value(outdir,Opts),
+ HTML = lists:member(html,Opts),
+ Fun = fun({Module,File}) ->
+ OutFile = outfilename(OutDir,Module,HTML),
+ do_analyse_to_file(Module,File,OutFile,HTML,State)
+ end,
+ {Ok,Error1} = split_ok_error(pmap(Fun, LoadedMF++ImportedMF),[],[]),
+ {result,Ok,Error ++ Error1}.
+
+analyse_all_to_file(Opts, State) ->
+ collect(State#main_state.nodes),
+ AllModules = get_all_modules(State),
+ OutDir = proplists:get_value(outdir,Opts),
+ HTML = lists:member(html,Opts),
+ Fun = fun({Module,File}) ->
+ OutFile = outfilename(OutDir,Module,HTML),
+ do_analyse_to_file(Module,File,OutFile,HTML,State)
+ end,
+ {Ok,Error} = split_ok_error(pmap(Fun, AllModules),[],[]),
+ {result,Ok,Error}.
+
+get_all_modules(State) ->
+ get_all_modules(State#main_state.compiled ++ State#main_state.imported,[]).
+get_all_modules([{Module,File}|Rest],Acc) ->
+ get_all_modules(Rest,[{Module,File}|Acc]);
+get_all_modules([{Module,File,_}|Rest],Acc) ->
+ case lists:keymember(Module,1,Acc) of
+ true -> get_all_modules(Rest,Acc);
+ false -> get_all_modules(Rest,[{Module,File}|Acc])
+ end;
+get_all_modules([],Acc) ->
+ Acc.
+
+split_ok_error([{ok,R}|Result],Ok,Error) ->
+ split_ok_error(Result,[R|Ok],Error);
+split_ok_error([{error,R}|Result],Ok,Error) ->
+ split_ok_error(Result,Ok,[R|Error]);
+split_ok_error([],Ok,Error) ->
+ {Ok,Error}.
+
+do_parallel_analysis_to_file(Module, Opts, Loaded, From, State) ->
File = case Loaded of
{loaded, File0} ->
[{Module,Clauses}] =
@@ -2105,21 +2354,29 @@ do_parallel_analysis_to_file(Module, OutFile, Opts, Loaded, From, State) ->
{imported, File0, _} ->
File0
end,
+ HTML = lists:member(html,Opts),
+ OutFile =
+ case proplists:get_value(outfile,Opts) of
+ undefined ->
+ outfilename(proplists:get_value(outdir,Opts),Module,HTML);
+ F ->
+ F
+ end,
+ reply(From, do_analyse_to_file(Module,File,OutFile,HTML,State)).
+
+do_analyse_to_file(Module,File,OutFile,HTML,State) ->
case find_source(Module, File) of
{beam,_BeamFile} ->
- reply(From, {error,no_source_code_found});
+ {error,{no_source_code_found,Module}};
ErlFile ->
analyse_info(Module,State#main_state.imported),
- HTML = lists:member(html,Opts),
- R = do_analyse_to_file(Module,OutFile,
- ErlFile,HTML),
- reply(From, R)
+ do_analyse_to_file1(Module,OutFile,ErlFile,HTML)
end.
-%% do_analyse_to_file(Module,OutFile,ErlFile) -> {ok,OutFile} | {error,Error}
+%% do_analyse_to_file1(Module,OutFile,ErlFile) -> {ok,OutFile} | {error,Error}
%% Module = atom()
%% OutFile = ErlFile = string()
-do_analyse_to_file(Module, OutFile, ErlFile, HTML) ->
+do_analyse_to_file1(Module, OutFile, ErlFile, HTML) ->
case file:open(ErlFile, [read]) of
{ok, InFd} ->
case file:open(OutFile, [write]) of
@@ -2160,7 +2417,10 @@ do_analyse_to_file(Module, OutFile, ErlFile, HTML) ->
"**************************************"
"\n\n"]),
- print_lines(Module, InFd, OutFd, 1, HTML),
+ Pattern = {#bump{module=Module,line='$1',_='_'},'$2'},
+ MS = [{Pattern,[],[{{'$1','$2'}}]}],
+ CovLines = lists:keysort(1,ets:select(?COLLECTION_TABLE, MS)),
+ print_lines(Module, CovLines, InFd, OutFd, 1, HTML),
if HTML -> io:format(OutFd,"</pre>\n</body>\n</html>\n",[]);
true -> ok
@@ -2179,21 +2439,19 @@ do_analyse_to_file(Module, OutFile, ErlFile, HTML) ->
{error, {file, ErlFile, Reason}}
end.
-print_lines(Module, InFd, OutFd, L, HTML) ->
+
+print_lines(Module, CovLines, InFd, OutFd, L, HTML) ->
case io:get_line(InFd, '') of
eof ->
ignore;
"%"++_=Line -> %Comment line - not executed.
io:put_chars(OutFd, [tab(),escape_lt_and_gt(Line, HTML)]),
- print_lines(Module, InFd, OutFd, L+1, HTML);
+ print_lines(Module, CovLines, InFd, OutFd, L+1, HTML);
RawLine ->
Line = escape_lt_and_gt(RawLine,HTML),
- Pattern = {#bump{module=Module,line=L},'$1'},
- case ets:match(?COLLECTION_TABLE, Pattern) of
- [] ->
- io:put_chars(OutFd, [tab(),Line]);
- Ns ->
- N = lists:foldl(fun([Ni], Nacc) -> Nacc+Ni end, 0, Ns),
+ case CovLines of
+ [{L,N}|CovLines1] ->
if
N=:=0, HTML=:=true ->
LineNoNL = Line -- "\n",
@@ -2211,9 +2469,12 @@ print_lines(Module, InFd, OutFd, L, HTML) ->
true ->
Str = integer_to_list(N),
io:put_chars(OutFd, [Str,fill3(),Line])
- end
- end,
- print_lines(Module, InFd, OutFd, L+1, HTML)
+ end,
+ print_lines(Module, CovLines1, InFd, OutFd, L+1, HTML);
+ _ ->
+ io:put_chars(OutFd, [tab(),Line]),
+ print_lines(Module, CovLines, InFd, OutFd, L+1, HTML)
+ end
end.
tab() -> " | ".
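print_lines/6 above now walks the source file in step with a pre-sorted [{LineNo, Count}] list, so each source line only needs a look at the head of that list. A minimal sketch of the same merge, with illustrative names only:

    %% Pair every source line with its execution count, or none if the line
    %% has no cover data. CovLines must be sorted on line number.
    annotate(Lines, CovLines) ->
        annotate(Lines, CovLines, 1, []).

    annotate([], _CovLines, _L, Acc) ->
        lists:reverse(Acc);
    annotate([Line | Lines], [{L, N} | CovLines], L, Acc) ->
        annotate(Lines, CovLines, L + 1, [{L, N, Line} | Acc]);
    annotate([Line | Lines], CovLines, L, Acc) ->
        annotate(Lines, CovLines, L + 1, [{L, none, Line} | Acc]).

    %% annotate(["a;","b;","c."], [{2,5}]) ->
    %%     [{1,none,"a;"}, {2,5,"b;"}, {3,none,"c."}]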
@@ -2362,21 +2623,21 @@ do_reset_collection_table(Module) ->
ets:match_delete(?COLLECTION_TABLE, {#bump{module=Module},'_'}).
%% do_reset(Module) -> ok
-%% The reset is done on a per-clause basis to avoid building
+%% The reset is done on ?CHUNK_SIZE number of bumps to avoid building
%% long lists in the case of very large modules
do_reset(Module) ->
- [{Module,Clauses}] = ets:lookup(?COVER_CLAUSE_TABLE, Module),
- do_reset2(Clauses).
+ Pattern = {#bump{module=Module, _='_'}, '$1'},
+ MatchSpec = [{Pattern,[{'=/=','$1',0}],['$_']}],
+ Match = ets:select(?COVER_TABLE,MatchSpec,?CHUNK_SIZE),
+ do_reset2(Match).
-do_reset2([{M,F,A,C,_L}|Clauses]) ->
- Pattern = {#bump{module=M, function=F, arity=A, clause=C}, '_'},
- Bumps = ets:match_object(?COVER_TABLE, Pattern),
+do_reset2({Bumps,Continuation}) ->
lists:foreach(fun({Bump,_N}) ->
ets:insert(?COVER_TABLE, {Bump,0})
end,
Bumps),
- do_reset2(Clauses);
-do_reset2([]) ->
+ do_reset2(ets:select(Continuation));
+do_reset2('$end_of_table') ->
ok.
do_clear(Module) ->
@@ -2419,31 +2680,43 @@ escape_lt_and_gt1([],Acc) ->
escape_lt_and_gt1([H|T],Acc) ->
escape_lt_and_gt1(T,[H|Acc]).
-pmap(Fun, List) ->
- pmap(Fun, List, 20).
-pmap(Fun, List, Limit) ->
- pmap(Fun, List, [], Limit, 0, []).
-pmap(Fun, [E | Rest], Pids, Limit, Cnt, Acc) when Cnt < Limit ->
- Collector = self(),
- Pid = spawn_link(fun() ->
- ?SPAWN_DBG(pmap,E),
- Collector ! {res,self(),Fun(E)}
- end),
- erlang:monitor(process, Pid),
- pmap(Fun, Rest, Pids ++ [Pid], Limit, Cnt + 1, Acc);
-pmap(Fun, List, [Pid | Pids], Limit, Cnt, Acc) ->
- receive
- {'DOWN', _Ref, process, X, _} when is_pid(X) ->
- pmap(Fun, List, [Pid | Pids], Limit, Cnt - 1, Acc);
- {res, Pid, Res} ->
- pmap(Fun, List, Pids, Limit, Cnt, [Res | Acc])
- end;
-pmap(_Fun, [], [], _Limit, 0, Acc) ->
- lists:reverse(Acc);
-pmap(Fun, [], [], Limit, Cnt, Acc) ->
+%%%--Internal functions for parallelization------------------------------
+pmap(Fun,List) ->
+ NTot = length(List),
+ NProcs = erlang:system_info(schedulers) * 2,
+ NPerProc = (NTot div NProcs) + 1,
+ Mons = pmap_spawn(Fun,NPerProc,List,[]),
+ pmap_collect(Mons,[]).
+
+pmap_spawn(_,_,[],Mons) ->
+ Mons;
+pmap_spawn(Fun,NPerProc,List,Mons) ->
+ {L1,L2} = if length(List)>=NPerProc -> lists:split(NPerProc,List);
+ true -> {List,[]} % last chunk
+ end,
+ Mon =
+ spawn_monitor(
+ fun() ->
+ exit({pmap_done,lists:map(Fun,L1)})
+ end),
+ pmap_spawn(Fun,NPerProc,L2,[Mon|Mons]).
+
+pmap_collect([],Acc) ->
+ lists:append(Acc);
+pmap_collect(Mons,Acc) ->
receive
- {'DOWN', _Ref, process, X, _} when is_pid(X) ->
- pmap(Fun, [], [], Limit, Cnt - 1, Acc)
+ {'DOWN', Ref, process, Pid, {pmap_done,Result}} ->
+ pmap_collect(lists:delete({Pid,Ref},Mons),[Result|Acc]);
+ {'DOWN', Ref, process, Pid, Reason} = Down ->
+ case lists:member({Pid,Ref},Mons) of
+ true ->
+ %% Something went really wrong - don't hang!
+ exit(Reason);
+ false ->
+ %% This should be handled somewhere else
+ self() ! Down,
+ pmap_collect(Mons,Acc)
+ end
end.
%%%-----------------------------------------------------------------
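The new pmap/2 above partitions the work list over at most 2 * schedulers worker processes. A sketch of just the partitioning arithmetic, with made-up function names (split_work/2 is not part of the patch):

    split_work(List, NProcs) ->
        NPerProc = length(List) div NProcs + 1,
        split_work1(List, NPerProc).

    split_work1([], _NPerProc) ->
        [];
    split_work1(List, NPerProc) when length(List) >= NPerProc ->
        {Chunk, Rest} = lists:split(NPerProc, List),
        [Chunk | split_work1(Rest, NPerProc)];
    split_work1(Last, _NPerProc) ->
        [Last].                                %% last, shorter chunk

    %% split_work(lists:seq(1, 10), 4) -> [[1,2,3],[4,5,6],[7,8,9],[10]]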
diff --git a/lib/tools/src/lcnt.erl b/lib/tools/src/lcnt.erl
index f1251fddab..d5ba8aa52f 100644
--- a/lib/tools/src/lcnt.erl
+++ b/lib/tools/src/lcnt.erl
@@ -305,7 +305,7 @@ handle_call({inspect, Lockname, InOpts}, _From, #state{ duration=Duration, locks
{true, true} -> locks_ids(Filtered);
_ -> []
end,
- Combos = combine_classes(Filtered, proplists:get_value(combine, Opts)),
+ Combos = combine_classes(Filtered, proplists:get_value(combine, Opts)),
case proplists:get_value(locations, Opts) of
true ->
lists:foreach(fun
@@ -329,9 +329,8 @@ handle_call({inspect, Lockname, InOpts}, _From, #state{ duration=Duration, locks
end
end, Combos);
_ ->
- Print1 = locks2print(Combos, Duration),
- Print2 = filter_print(Print1, Opts),
- print_lock_information(Print2, proplists:get_value(print, Opts))
+ Print = filter_print(locks2print(Combos, Duration), Opts),
+ print_lock_information(Print, proplists:get_value(print, Opts))
end,
{reply, ok, State};
@@ -357,8 +356,7 @@ handle_call({histogram, Lockname, InOpts}, _From, #state{ duration=Duration, loc
{thresholds, [{tries, -1}, {colls, -1}, {time, -1}]}], Opts),
Prints = locks2print([L], Duration),
print_lock_information(Prints, proplists:get_value(print, Opts1)),
- print_full_histogram(SumStats#stats.hist),
- io:format("~n")
+ print_full_histogram(SumStats#stats.hist)
end, Combos),
{reply, ok, State};
@@ -509,20 +507,23 @@ filter_locks(Locks, Lockname) ->
% 4. max length of locks
filter_print(PLs, Opts) ->
- TLs = threshold_locks(PLs, proplists:get_value(thresholds, Opts, [])),
- SLs = sort_locks(TLs, proplists:get_value(sort, Opts, time)),
- CLs = cut_locks(SLs, proplists:get_value(max_locks, Opts, none)),
- reverse_locks(CLs, not proplists:get_value(reverse,Opts, false)).
-
-sort_locks(Locks, name) -> lists:keysort(#print.name, Locks);
-sort_locks(Locks, id) -> lists:keysort(#print.id, Locks);
-sort_locks(Locks, type) -> lists:keysort(#print.type, Locks);
-sort_locks(Locks, tries) -> lists:keysort(#print.tries, Locks);
-sort_locks(Locks, colls) -> lists:keysort(#print.colls, Locks);
-sort_locks(Locks, ratio) -> lists:keysort(#print.cr, Locks);
-sort_locks(Locks, time) -> lists:keysort(#print.time, Locks);
+ TLs = threshold_locks(PLs, proplists:get_value(thresholds, Opts, [])),
+ SLs = sort_locks(TLs, proplists:get_value(sort, Opts, time)),
+ CLs = cut_locks(SLs, proplists:get_value(max_locks, Opts, none)),
+ reverse_locks(CLs, proplists:get_value(reverse, Opts, false)).
+
+sort_locks(Locks, name) -> reverse_sort_locks(#print.name, Locks);
+sort_locks(Locks, id) -> reverse_sort_locks(#print.id, Locks);
+sort_locks(Locks, type) -> reverse_sort_locks(#print.type, Locks);
+sort_locks(Locks, tries) -> reverse_sort_locks(#print.tries, Locks);
+sort_locks(Locks, colls) -> reverse_sort_locks(#print.colls, Locks);
+sort_locks(Locks, ratio) -> reverse_sort_locks(#print.cr, Locks);
+sort_locks(Locks, time) -> reverse_sort_locks(#print.time, Locks);
sort_locks(Locks, _) -> sort_locks(Locks, time).
+reverse_sort_locks(Ix, Locks) ->
+ lists:reverse(lists:keysort(Ix, Locks)).
+
% cut locks not above certain thresholds
threshold_locks(Locks, Thresholds) ->
Tries = proplists:get_value(tries, Thresholds, -1),
@@ -647,15 +648,19 @@ format_histogram(Tup) when is_tuple(Tup) ->
_ -> string_histogram([case V of 0 -> 0; _ -> V/Max end || V <- Vs])
end.
-string_histogram([0|Vs]) ->
- [$\s|string_histogram(Vs)];
-string_histogram([V|Vs]) when V > 0.66 ->
- [$X|string_histogram(Vs)];
-string_histogram([V|Vs]) when V > 0.33 ->
- [$x|string_histogram(Vs)];
-string_histogram([_|Vs]) ->
- [$.|string_histogram(Vs)];
-string_histogram([]) -> [].
+string_histogram(Vs) ->
+ [$||histogram_values_to_string(Vs,$|)].
+
+histogram_values_to_string([0|Vs],End) ->
+ [$\s|histogram_values_to_string(Vs,End)];
+histogram_values_to_string([V|Vs],End) when V > 0.66 ->
+ [$X|histogram_values_to_string(Vs,End)];
+histogram_values_to_string([V|Vs],End) when V > 0.33 ->
+ [$x|histogram_values_to_string(Vs,End)];
+histogram_values_to_string([_|Vs],End) ->
+ [$.|histogram_values_to_string(Vs,End)];
+histogram_values_to_string([],End) ->
+ [End].
%% state making
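With the change above each histogram string is delimited by '|' on both sides: blank for zero buckets, '.' up to 0.33, 'x' up to 0.66 and 'X' above that. For example (the values are made up):

    %% string_histogram([0, 0.2, 0.5, 1.0]) -> "| .xX|"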
@@ -778,7 +783,7 @@ auto_print_width(Locks, Print) ->
({print,print}, Out) -> [print|Out];
({Str, Len}, Out) -> [erlang:min(erlang:max(length(s(Str))+1,Len),80)|Out]
end, [], lists:zip(tuple_to_list(L), tuple_to_list(Max)))))
- end, #print{ id = 4, type = 5, entry = 5, name = 6, tries = 8, colls = 13, cr = 16, time = 11, dtr = 14, hist=20 },
+ end, #print{ id=4, type=5, entry=5, name=6, tries=8, colls=13, cr=16, time=11, dtr=14, hist=20 },
Locks),
% Setup the offsets for later pruning
Offsets = [
@@ -820,7 +825,7 @@ print_header(Opts) ->
cr = "collisions [%]",
time = "time [us]",
dtr = "duration [%]",
- hist = "histogram"
+ hist = "histogram [log2(us)]"
},
Divider = #print{
name = lists:duplicate(1 + length(Header#print.name), 45),
@@ -863,9 +868,9 @@ format_lock(L, [Opt|Opts]) ->
{time, W} -> [{space, W, s(L#print.time) } | format_lock(L, Opts)];
duration -> [{space, 20, s(L#print.dtr) } | format_lock(L, Opts)];
{duration, W} -> [{space, W, s(L#print.dtr) } | format_lock(L, Opts)];
- histogram -> [{space, 0, s(L#print.hist) } | format_lock(L, Opts)];
- {histogram, W} -> [{space, W, s(L#print.hist) } | format_lock(L, Opts)];
- _ -> format_lock(L, Opts)
+ histogram -> [{space, 20, s(L#print.hist) } | format_lock(L, Opts)];
+ {histogram, W} -> [{left, W - length(s(L#print.hist)) - 1, s(L#print.hist)} | format_lock(L, Opts)];
+ _ -> format_lock(L, Opts)
end.
print_state_information(#state{locks = Locks} = State) ->
@@ -926,6 +931,7 @@ s(T) -> term2string(T).
strings(Strings) -> strings(Strings, []).
strings([], Out) -> Out;
strings([{space, N, S} | Ss], Out) -> strings(Ss, Out ++ term2string(term2string("~~~ws", [N]), [S]));
+strings([{left, N, S} | Ss], Out) -> strings(Ss, Out ++ term2string(term2string(" ~~s~~~ws", [N]), [S,""]));
strings([{format, Format, S} | Ss], Out) -> strings(Ss, Out ++ term2string(Format, [S]));
strings([S|Ss], Out) -> strings(Ss, Out ++ term2string("~ts", [S])).
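The new {left, N, S} directive left-aligns S and then pads with N spaces. Assuming term2string/2 wraps io_lib:format/2 (an assumption based on how lcnt formats elsewhere), the expansion for, say, N = 5 and S = "|xX.|" would be roughly:

    %% term2string(" ~~s~~~ws", [5])        -> " ~s~5s"
    %% term2string(" ~s~5s", ["|xX.|", ""]) -> " |xX.|     "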
diff --git a/lib/tools/test/cover_SUITE.erl b/lib/tools/test/cover_SUITE.erl
index 80807b1d38..368fa6c3d1 100644
--- a/lib/tools/test/cover_SUITE.erl
+++ b/lib/tools/test/cover_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2001-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2001-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -33,6 +33,8 @@
-export([do_coverage/1]).
+-export([distribution_performance/1]).
+
-include_lib("test_server/include/test_server.hrl").
%%----------------------------------------------------------------------
@@ -170,10 +172,15 @@ compile(Config) when is_list(Config) ->
?line {ok, CWD} = file:get_cwd(),
?line Result2 = cover:compile_directory(CWD),
?line SortedResult = lists:sort(Result2),
- ?line [{error,_DFile},{ok,a},{ok,b},{ok,cc},{ok,f}] = SortedResult,
+ ?line [{error,DFile},{ok,a},{ok,b},{ok,cc},{ok,f}] = SortedResult,
?line [{ok,e}] = cover:compile_directory("d1"),
?line {error,enoent} = cover:compile_directory("d2"),
+ [] = cover:compile([]),
+ Result21 = cover:compile([a,b,"cc.erl",d,"f"]),
+ SortedResult21 = lists:sort(Result21),
+ [{error,DFile},{ok,a},{ok,b},{ok,cc},{ok,f}] = SortedResult21,
+
?line {ok,a} = cover:compile(a),
?line {ok,b} = compile:file(b),
?line code:purge(b),
@@ -213,8 +220,14 @@ compile(Config) when is_list(Config) ->
?line {error,non_existing} = cover:compile_beam(z),
?line [{ok,y}] = cover:compile_beam_directory("d"),
?line Result3 = lists:sort(cover:compile_beam_directory()),
- ?line [{error,{no_abstract_code,_XBeam}},{ok,crypt},{ok,v},{ok,w}] = Result3,
+ ?line [{error,{no_abstract_code,XBeam}},{ok,crypt},{ok,v},{ok,w}] = Result3,
?line {error,enoent} = cover:compile_beam_directory("d2"),
+
+ [] = cover:compile_beam([]),
+ Result31 = cover:compile_beam([crypt,"v.beam",w,"x"]),
+ SortedResult31 = lists:sort(Result31),
+ [{error,{no_abstract_code,XBeam}},{ok,crypt},{ok,v},{ok,w}] = SortedResult31,
+
?line decompile([v,w,y]),
?line Files = lsfiles(),
?line remove(files(Files, ".beam")).
@@ -239,20 +252,22 @@ analyse(Config) when is_list(Config) ->
?line done = a:start(5),
- ?line {ok, {a,{17,2}}} = cover:analyse(a, coverage, module),
- ?line {ok, [{{a,start,1},{6,0}},
- {{a,stop,1},{0,1}},
- {{a,pong,1},{1,0}},
- {{a,loop,3},{5,1}},
- {{a,trycatch,1},{4,0}},
- {{a,exit_kalle,0},{1,0}}]} = cover:analyse(a, coverage, function),
- ?line {ok, [{{a,start,1,1},{6,0}},
- {{a,stop,1,1},{0,1}},
- {{a,pong,1,1},{1,0}},
+ {ok, {a,{17,2}}=ACovMod} = cover:analyse(a, coverage, module),
+ {ok, [{{a,exit_kalle,0},{1,0}},
+ {{a,loop,3},{5,1}},
+ {{a,pong,1},{1,0}},
+ {{a,start,1},{6,0}},
+ {{a,stop,1},{0,1}},
+ {{a,trycatch,1},{4,0}}]=ACovFunc} =
+ cover:analyse(a, coverage, function),
+ {ok, [{{a,exit_kalle,0,1},{1,0}},
{{a,loop,3,1},{3,1}},
{{a,loop,3,2},{2,0}},
- {{a,trycatch,1,1},{4,0}},
- {{a,exit_kalle,0,1},{1,0}}]} = cover:analyse(a, coverage, clause),
+ {{a,pong,1,1},{1,0}},
+ {{a,start,1,1},{6,0}},
+ {{a,stop,1,1},{0,1}},
+ {{a,trycatch,1,1},{4,0}}]=ACovClause} =
+ cover:analyse(a, coverage, clause),
?line {ok, [{{a,9},{1,0}},
{{a,10},{1,0}},
{{a,11},{1,0}},
@@ -271,22 +286,22 @@ analyse(Config) when is_list(Config) ->
{{a,47},{1,0}},
{{a,49},{1,0}},
{{a,51},{1,0}},
- {{a,55},{1,0}}]} = cover:analyse(a, coverage, line),
-
- ?line {ok, {a,15}} = cover:analyse(a, calls, module),
- ?line {ok, [{{a,start,1},1},
- {{a,stop,1},0},
- {{a,pong,1},5},
- {{a,loop,3},6},
- {{a,trycatch,1},2},
- {{a,exit_kalle,0},1}]} = cover:analyse(a, calls, function),
- ?line {ok, [{{a,start,1,1},1},
- {{a,stop,1,1},0},
- {{a,pong,1,1},5},
- {{a,loop,3,1},5},
- {{a,loop,3,2},1},
- {{a,trycatch,1,1},2},
- {{a,exit_kalle,0,1},1}]} = cover:analyse(a, calls, clause),
+ {{a,55},{1,0}}]=ACovLine} = cover:analyse(a, coverage, line),
+
+ {ok, {a,15}=ACallsMod} = cover:analyse(a, calls, module),
+ {ok, [{{a,exit_kalle,0},1},
+ {{a,loop,3},6},
+ {{a,pong,1},5},
+ {{a,start,1},1},
+ {{a,stop,1},0},
+ {{a,trycatch,1},2}]=ACallsFunc} = cover:analyse(a, calls, function),
+ {ok, [{{a,exit_kalle,0,1},1},
+ {{a,loop,3,1},5},
+ {{a,loop,3,2},1},
+ {{a,pong,1,1},5},
+ {{a,start,1,1},1},
+ {{a,stop,1,1},0},
+ {{a,trycatch,1,1},2}]=ACallsClause} = cover:analyse(a, calls, clause),
?line {ok, [{{a,9},1},
{{a,10},1},
{{a,11},1},
@@ -305,27 +320,85 @@ analyse(Config) when is_list(Config) ->
{{a,47},1},
{{a,49},1},
{{a,51},2},
- {{a,55},1}]} = cover:analyse(a, calls, line),
-
- ?line {ok, [{{a,start,1},{6,0}},
- {{a,stop,1},{0,1}},
- {{a,pong,1},{1,0}},
- {{a,loop,3},{5,1}},
- {{a,trycatch,1},{4,0}},
- {{a,exit_kalle,0},{1,0}}]} = cover:analyse(a),
- ?line {ok, {a,{17,2}}} = cover:analyse(a, module),
- ?line {ok, [{{a,start,1},1},
- {{a,stop,1},0},
- {{a,pong,1},5},
- {{a,loop,3},6},
- {{a,trycatch,1},2},
- {{a,exit_kalle,0},1}]} = cover:analyse(a, calls),
+ {{a,55},1}]=ACallsLine} = cover:analyse(a, calls, line),
+
+ {ok,ACovFunc} = cover:analyse(a),
+ {ok,ACovMod} = cover:analyse(a, module),
+ {ok,ACallsFunc} = cover:analyse(a, calls),
?line {ok, "a.COVER.out"} = cover:analyse_to_file(a),
?line {ok, "e.COVER.out"} = cover:analyse_to_file(e),
?line {ok, "a.COVER.html"} = cover:analyse_to_file(a,[html]),
?line {ok, "e.COVER.html"} = cover:analyse_to_file(e,[html]),
+ %% Analyse all modules
+ Modules = cover:modules(),
+ N = length(Modules),
+
+ {result,CovFunc,[]} = cover:analyse(), % default = coverage, function
+ ACovFunc = [A || {{a,_,_},_}=A<-CovFunc],
+
+ {result,CovMod,[]} = cover:analyse(coverage,module),
+ ACovMod = lists:keyfind(a,1,CovMod),
+
+ {result,CovClause,[]} = cover:analyse(coverage,clause),
+ ACovClause = [A || {{a,_,_,_},_}=A<-CovClause],
+
+ {result,CovLine,[]} = cover:analyse(coverage,line),
+ ACovLine = [A || {{a,_},_}=A<-CovLine],
+
+ {result,CallsFunc,[]} = cover:analyse(calls,function),
+ ACallsFunc = [A || {{a,_,_},_}=A<-CallsFunc],
+
+ {result,CallsMod,[]} = cover:analyse(calls,module),
+ ACallsMod = lists:keyfind(a,1,CallsMod),
+
+ {result,CallsClause,[]} = cover:analyse(calls,clause),
+ ACallsClause = [A || {{a,_,_,_},_}=A<-CallsClause],
+
+ {result,CallsLine,[]} = cover:analyse(calls,line),
+ ACallsLine = [A || {{a,_},_}=A<-CallsLine],
+
+ {result,AllToFile,[]} = cover:analyse_to_file(),
+ N = length(AllToFile),
+ true = lists:member("a.COVER.out",AllToFile),
+ {result,AllToFileHtml,[]} = cover:analyse_to_file([html]),
+ N = length(AllToFileHtml),
+ true = lists:member("a.COVER.html",AllToFileHtml),
+
+ %% Analyse list of modules
+ %% Listing all modules so we can compare result with above result
+ %% from analysing all.
+
+ {result,CovFunc1,[]} = cover:analyse(Modules), % default = coverage, function
+ true = lists:sort(CovFunc) == lists:sort(CovFunc1),
+
+ {result,CovMod1,[]} = cover:analyse(Modules,coverage,module),
+ true = lists:sort(CovMod) == lists:sort(CovMod1),
+
+ {result,CovClause1,[]} = cover:analyse(Modules,coverage,clause),
+ true = lists:sort(CovClause) == lists:sort(CovClause1),
+
+ {result,CovLine1,[]} = cover:analyse(Modules,coverage,line),
+ true = lists:sort(CovLine) == lists:sort(CovLine1),
+
+ {result,CallsFunc1,[]} = cover:analyse(Modules,calls,function),
+    true = lists:sort(CallsFunc) == lists:sort(CallsFunc1),
+
+ {result,CallsMod1,[]} = cover:analyse(Modules,calls,module),
+ true = lists:sort(CallsMod) == lists:sort(CallsMod1),
+
+ {result,CallsClause1,[]} = cover:analyse(Modules,calls,clause),
+ true = lists:sort(CallsClause) == lists:sort(CallsClause1),
+
+ {result,CallsLine1,[]} = cover:analyse(Modules,calls,line),
+ true = lists:sort(CallsLine) == lists:sort(CallsLine1),
+
+ {result,AllToFile1,[]} = cover:analyse_to_file(Modules),
+ true = lists:sort(AllToFile) == lists:sort(AllToFile1),
+ {result,AllToFileHtml1,[]} = cover:analyse_to_file(Modules,[html]),
+ true = lists:sort(AllToFileHtml) == lists:sort(AllToFileHtml1),
+
%% analyse_to_file of file which is compiled from beam
?line {ok,f} = compile:file(f,[debug_info]),
?line code:purge(f),
@@ -348,14 +421,17 @@ analyse(Config) when is_list(Config) ->
{module,z} = code:load_file(z),
{ok,z} = cover:compile_beam(z),
ok = file:delete("z.erl"),
- {error,no_source_code_found} = cover:analyse_to_file(z),
+ {error,{no_source_code_found,z}} = cover:analyse_to_file(z),
+ {result,[],[{no_source_code_found,z}]} = cover:analyse_to_file([z]),
code:purge(z),
code:delete(z),
?line {error,{not_cover_compiled,b}} = cover:analyse(b),
?line {error,{not_cover_compiled,g}} = cover:analyse(g),
+ {result,[],[{not_cover_compiled,b}]} = cover:analyse([b]),
?line {error,{not_cover_compiled,b}} = cover:analyse_to_file(b),
- ?line {error,{not_cover_compiled,g}} = cover:analyse_to_file(g).
+ {error,{not_cover_compiled,g}} = cover:analyse_to_file(g),
+ {result,[],[{not_cover_compiled,g}]} = cover:analyse_to_file([g]).
misc(suite) -> [];
misc(Config) when is_list(Config) ->
@@ -680,6 +756,119 @@ stop_node_after_disconnect(Config) ->
?t:stop_node(N1),
ok.
+distribution_performance(Config) ->
+ PrivDir = ?config(priv_dir,Config),
+ Dir = filename:join(PrivDir,"distribution_performance"),
+ AllFiles = filename:join(Dir,"*"),
+ ok = filelib:ensure_dir(AllFiles),
+ code:add_patha(Dir),
+ M = 9, % Generate M modules
+ F = 210, % with F functions
+ C = 10, % and each function of C clauses
+ Mods = generate_modules(M,F,C,Dir),
+
+% ?t:break(""),
+
+ NodeName = cover_SUITE_distribution_performance,
+ {ok,N1} = ?t:start_node(NodeName,peer,[{start_cover,false}]),
+ %% CFun = fun() ->
+ %% [{ok,_} = cover:compile_beam(Mod) || Mod <- Mods]
+ %% end,
+ CFun = fun() -> cover:compile_beam(Mods) end,
+ {CT,CA} = timer:tc(CFun),
+% erlang:display(CA),
+ erlang:display({compile,CT}),
+
+ {SNT,_} = timer:tc(fun() -> {ok,[N1]} = cover:start(nodes()) end),
+ erlang:display({start_node,SNT}),
+
+ [1 = rpc:call(N1,Mod,f1,[1]) || Mod <- Mods],
+
+% Fun = fun() -> [cover:analyse(Mod,calls,function) || Mod<-Mods] end,
+% Fun = fun() -> analyse_all(Mods,calls,function) end,
+% Fun = fun() -> cover:analyse('_',calls,function) end,
+ Fun = fun() -> cover:analyse(Mods,calls,function) end,
+
+% Fun = fun() -> [begin cover:analyse_to_file(Mod,[html]) end || Mod<-Mods] end,
+% Fun = fun() -> analyse_all_to_file(Mods,[html]) end,
+% Fun = fun() -> cover:analyse_to_file(Mods,[html]) end,
+% Fun = fun() -> cover:analyse_to_file([html]) end,
+
+% Fun = fun() -> cover:reset() end,
+
+ {AT,A} = timer:tc(Fun),
+ erlang:display({analyse,AT}),
+% erlang:display(lists:sort([X || X={_MFA,N} <- lists:append([L || {ok,L}<-A]), N=/=0])),
+
+ %% fprof:apply(Fun, [],[{procs,[whereis(cover_server)]}]),
+ %% fprof:profile(),
+ %% fprof:analyse(dest,[]),
+
+ {SNT2,_} = timer:tc(fun() -> ?t:stop_node(N1) end),
+ erlang:display({stop_node,SNT2}),
+
+ code:del_path(Dir),
+ Files = filelib:wildcard(AllFiles),
+ [ok = file:delete(File) || File <- Files],
+ ok = file:del_dir(Dir),
+ ok.
+
+%% Run analysis in parallel
+analyse_all(Mods,Analysis,Level) ->
+ Pids = [begin
+ Pid = spawn(fun() ->
+ {ok,A} = cover:analyse(Mod,Analysis,Level),
+ exit(A)
+ end),
+ erlang:monitor(process,Pid),
+ Pid
+ end || Mod <- Mods],
+ get_downs(Pids,[]).
+
+analyse_all_to_file(Mods,Opts) ->
+ Pids = [begin
+ Pid = cover:async_analyse_to_file(Mod,Opts),
+ erlang:monitor(process,Pid),
+ Pid
+ end || Mod <- Mods],
+ get_downs(Pids,[]).
+
+get_downs([],Acc) ->
+ Acc;
+get_downs(Pids,Acc) ->
+ receive
+ {'DOWN', _Ref, _Type, Pid, A} ->
+ get_downs(lists:delete(Pid,Pids),[A|Acc])
+ end.
+
+generate_modules(0,_,_,_) ->
+ [];
+generate_modules(M,F,C,Dir) ->
+ ModStr = "m" ++ integer_to_list(M),
+ Mod = list_to_atom(ModStr),
+ Src = ["-module(",ModStr,").\n"
+ "-compile(export_all).\n" |
+ generate_functions(F,C)],
+ Erl = filename:join(Dir,ModStr++".erl"),
+ ok = file:write_file(Erl,Src),
+ {ok,Mod} = compile:file(Erl,[{outdir,Dir},debug_info,report]),
+ [Mod | generate_modules(M-1,F,C,Dir)].
+
+generate_functions(0,_) ->
+ [];
+generate_functions(F,C) ->
+ Func = "f" ++ integer_to_list(F),
+ [generate_clauses(C,Func) | generate_functions(F-1,C)].
+
+generate_clauses(0,_) ->
+ [];
+generate_clauses(C,Func) ->
+ CStr = integer_to_list(C),
+ Sep = if C==1 -> "."; true -> ";" end,
+ [Func,"(",CStr,") -> ",CStr,Sep,"\n" |
+ generate_clauses(C-1,Func)].
+
+
export_import(suite) -> [];
export_import(Config) when is_list(Config) ->
?line DataDir = ?config(data_dir, Config),
@@ -788,10 +977,11 @@ otp_5031(Config) when is_list(Config) ->
Dog = ?t:timetrap(?t:seconds(10)),
- ?line {ok,N1} = ?t:start_node(cover_SUITE_distribution1,slave,[]),
+ {ok,N1} = ?t:start_node(cover_SUITE_otp_5031,slave,[]),
?line {ok,[N1]} = cover:start(N1),
?line {error,not_main_node} = rpc:call(N1,cover,modules,[]),
?line cover:stop(),
+ ?t:stop_node(N1),
?t:timetrap_cancel(Dog),
ok.
@@ -1005,6 +1195,7 @@ otp_7095(Config) when is_list(Config) ->
ok.
+
otp_8270(doc) ->
["OTP-8270. Bug."];
otp_8270(suite) -> [];
@@ -1020,7 +1211,7 @@ otp_8270(Config) when is_list(Config) ->
?line {ok,N3} = ?t:start_node(cover_n3,slave,As),
timer:sleep(500),
- cover:start(nodes()),
+ {ok,[_,_,_]} = cover:start(nodes()),
Test = <<
"-module(m).\n"
@@ -1058,6 +1249,7 @@ otp_8270(Config) when is_list(Config) ->
?line {N2,true} = {N2,is_list(N2_info)},
?line {N3,true} = {N3,is_list(N3_info)},
+ exit(Pid1,kill),
?line ?t:stop_node(N1),
?line ?t:stop_node(N2),
?line ?t:stop_node(N3),
@@ -1572,7 +1764,9 @@ is_unloaded(What) ->
end.
check_f_calls(F1,F2) ->
- {ok,[{{f,f1,0},F1},{{f,f2,0},F2}|_]} = cover:analyse(f,calls,function).
+ {ok,A} = cover:analyse(f,calls,function),
+ {_,F1} = lists:keyfind({f,f1,0},1,A),
+ {_,F2} = lists:keyfind({f,f2,0},1,A).
cover_which_nodes(Expected) ->
case cover:which_nodes() of
diff --git a/lib/wx/src/wxe_server.erl b/lib/wx/src/wxe_server.erl
index 465b9da2e0..153e2475ba 100644
--- a/lib/wx/src/wxe_server.erl
+++ b/lib/wx/src/wxe_server.erl
@@ -223,14 +223,18 @@ handle_connect(Object, #evh{handler=undefined, cb=Callback} = EvData0,
Error ->
{reply, Error, State0}
end;
-handle_connect(Object, EvData=#evh{handler=Handler},
+handle_connect(Object, EvData=#evh{handler=Handler},
From, State0 = #state{users=Users}) ->
%% Correct process is already listening just register it
put(Handler, From),
- User0 = #user{events=Listeners0} = gb_trees:get(From, Users),
- User = User0#user{events=[{Object,EvData}|Listeners0]},
- State = State0#state{users=gb_trees:update(From, User, Users)},
- {reply, ok, State}.
+ case gb_trees:lookup(From, Users) of
+ {value, User0 = #user{events=Listeners0}} ->
+ User = User0#user{events=[{Object,EvData}|Listeners0]},
+ State = State0#state{users=gb_trees:update(From, User, Users)},
+ {reply, ok, State};
+ none -> %% We are closing up the shop
+ {reply, {error, terminating}, State0}
+ end.
invoke_cb({{Ev=#wx{}, Ref=#wx_ref{}}, FunId,_}, _S) ->
%% Event callbacks
diff --git a/lib/wx/test/wx_event_SUITE.erl b/lib/wx/test/wx_event_SUITE.erl
index 2c6c59bb55..3252547c9b 100644
--- a/lib/wx/test/wx_event_SUITE.erl
+++ b/lib/wx/test/wx_event_SUITE.erl
@@ -336,12 +336,14 @@ connect_in_callback(Config) ->
end}]),
wxWindow:show(F1),
receive
- {continue, F1} -> Tester ! {continue, F1}
+ {continue, F1} ->
+ true = wxFrame:disconnect(F1, size),
+ Tester ! {continue, F1}
end
end,
- wxFrame:connect(Frame,size,
+ wxFrame:connect(Frame,show,
[{callback,
- fun(#wx{event=#wxSize{}},_SizeEv) ->
+ fun(#wx{event=#wxShow{}},_SizeEv) ->
io:format("Frame got size~n",[]),
spawn(TestWindow)
end}]),
diff --git a/otp_versions.table b/otp_versions.table
index 57533855e3..41c05e79d8 100644
--- a/otp_versions.table
+++ b/otp_versions.table
@@ -1,3 +1,4 @@
+OTP-17.4.1 : erts-6.3.1 inets-5.10.5 # asn1-3.0.3 common_test-1.9 compiler-5.0.3 cosEvent-2.1.15 cosEventDomain-1.1.14 cosFileTransfer-1.1.16 cosNotification-1.1.21 cosProperty-1.1.17 cosTime-1.1.14 cosTransactions-1.2.14 crypto-3.4.2 debugger-4.0.2 dialyzer-2.7.3 diameter-1.8 edoc-0.7.16 eldap-1.1 erl_docgen-0.3.7 erl_interface-3.7.20 et-1.5 eunit-2.2.9 gs-1.5.16 hipe-3.11.2 ic-4.3.6 jinterface-1.5.12 kernel-3.1 megaco-3.17.3 mnesia-4.12.4 observer-2.0.3 odbc-2.10.22 orber-3.7.1 os_mon-2.3 ose-1.0.2 otp_mibs-1.0.10 parsetools-2.0.12 percept-0.8.10 public_key-0.22.1 reltool-0.6.6 runtime_tools-1.8.15 sasl-2.4.1 snmp-5.1.1 ssh-3.1 ssl-5.3.8 stdlib-2.3 syntax_tools-1.6.17 test_server-3.7.2 tools-2.7.1 typer-0.9.8 webtool-0.8.10 wx-1.3.2 xmerl-1.3.7 :
OTP-17.4 : asn1-3.0.3 common_test-1.9 compiler-5.0.3 crypto-3.4.2 debugger-4.0.2 dialyzer-2.7.3 diameter-1.8 edoc-0.7.16 eldap-1.1 erl_docgen-0.3.7 erl_interface-3.7.20 erts-6.3 eunit-2.2.9 hipe-3.11.2 inets-5.10.4 jinterface-1.5.12 kernel-3.1 megaco-3.17.3 mnesia-4.12.4 observer-2.0.3 odbc-2.10.22 otp_mibs-1.0.10 parsetools-2.0.12 percept-0.8.10 runtime_tools-1.8.15 snmp-5.1.1 ssh-3.1 ssl-5.3.8 stdlib-2.3 syntax_tools-1.6.17 test_server-3.7.2 tools-2.7.1 wx-1.3.2 # cosEvent-2.1.15 cosEventDomain-1.1.14 cosFileTransfer-1.1.16 cosNotification-1.1.21 cosProperty-1.1.17 cosTime-1.1.14 cosTransactions-1.2.14 et-1.5 gs-1.5.16 ic-4.3.6 orber-3.7.1 os_mon-2.3 ose-1.0.2 public_key-0.22.1 reltool-0.6.6 sasl-2.4.1 typer-0.9.8 webtool-0.8.10 xmerl-1.3.7 :
OTP-17.3.4 : erts-6.2.1 # asn1-3.0.2 common_test-1.8.2 compiler-5.0.2 cosEvent-2.1.15 cosEventDomain-1.1.14 cosFileTransfer-1.1.16 cosNotification-1.1.21 cosProperty-1.1.17 cosTime-1.1.14 cosTransactions-1.2.14 crypto-3.4.1 debugger-4.0.1 dialyzer-2.7.2 diameter-1.7.1 edoc-0.7.15 eldap-1.0.4 erl_docgen-0.3.6 erl_interface-3.7.19 et-1.5 eunit-2.2.8 gs-1.5.16 hipe-3.11.1 ic-4.3.6 inets-5.10.3 jinterface-1.5.11 kernel-3.0.3 megaco-3.17.2 mnesia-4.12.3 observer-2.0.2 odbc-2.10.21 orber-3.7.1 os_mon-2.3 ose-1.0.2 otp_mibs-1.0.9 parsetools-2.0.11 percept-0.8.9 public_key-0.22.1 reltool-0.6.6 runtime_tools-1.8.14 sasl-2.4.1 snmp-5.1 ssh-3.0.8 ssl-5.3.7 stdlib-2.2 syntax_tools-1.6.16 test_server-3.7.1 tools-2.7 typer-0.9.8 webtool-0.8.10 wx-1.3.1 xmerl-1.3.7 :
OTP-17.3.3 : ssh-3.0.8 # asn1-3.0.2 common_test-1.8.2 compiler-5.0.2 cosEvent-2.1.15 cosEventDomain-1.1.14 cosFileTransfer-1.1.16 cosNotification-1.1.21 cosProperty-1.1.17 cosTime-1.1.14 cosTransactions-1.2.14 crypto-3.4.1 debugger-4.0.1 dialyzer-2.7.2 diameter-1.7.1 edoc-0.7.15 eldap-1.0.4 erl_docgen-0.3.6 erl_interface-3.7.19 erts-6.2 et-1.5 eunit-2.2.8 gs-1.5.16 hipe-3.11.1 ic-4.3.6 inets-5.10.3 jinterface-1.5.11 kernel-3.0.3 megaco-3.17.2 mnesia-4.12.3 observer-2.0.2 odbc-2.10.21 orber-3.7.1 os_mon-2.3 ose-1.0.2 otp_mibs-1.0.9 parsetools-2.0.11 percept-0.8.9 public_key-0.22.1 reltool-0.6.6 runtime_tools-1.8.14 sasl-2.4.1 snmp-5.1 ssl-5.3.7 stdlib-2.2 syntax_tools-1.6.16 test_server-3.7.1 tools-2.7 typer-0.9.8 webtool-0.8.10 wx-1.3.1 xmerl-1.3.7 :
diff --git a/system/doc/getting_started/conc_prog.xml b/system/doc/getting_started/conc_prog.xml
index 15feaa9044..2b64826a93 100644
--- a/system/doc/getting_started/conc_prog.xml
+++ b/system/doc/getting_started/conc_prog.xml
@@ -95,7 +95,7 @@ goodbye</pre>
the second a "goodbye", the first another "hello" and so forth.
But where did the &lt;0.63.0&gt; come from? The return value of a
function is of course the return value of the last "thing" in
- the function. The last thing in the function <c>start</c> is</p>
+ the function. The last thing in the function <c>start</c> is:</p>
<code type="none">
spawn(tut14, say_something, [goodbye, 3]).</code>
<p><c>spawn</c> returns a <em>process identifier</em>, or
@@ -166,11 +166,11 @@ Pong_PID = spawn(tut15, pong, [])</code>
<c>start</c> now creates another process "ping".</p>
<code type="none">
spawn(tut15, ping, [3, Pong_PID]),</code>
- <p>this process executes</p>
+ <p>This process executes:</p>
<code type="none">
tut15:ping(3, Pong_PID)</code>
<p>&lt;0.36.0&gt; is the return value from the <c>start</c> function.</p>
- <p>The process "pong" now does:</p>
+ <p>The process "pong" now does:</p>
<code type="none">
receive
finished ->
@@ -235,7 +235,7 @@ Ping_PID ! pong</code>
Pid ! Message</code>
<p>I.e. <c>Message</c> (any Erlang term) is sent to the process
with identity <c>Pid</c>.</p>
- <p>After sending the message <c>pong</c>, to the process "ping",
+ <p>After sending the message <c>pong</c> to the process "ping",
"pong" calls the <c>pong</c> function again, which causes it to
get back to the <c>receive</c> again and wait for another message.
Now let's look at the process "ping". Recall that it was started
@@ -253,7 +253,7 @@ Pong_PID ! {ping, self()},</code>
<p><c>self()</c> returns the pid of the process which executes
<c>self()</c>, in this case the pid of "ping". (Recall the code
for "pong", this will land up in the variable <c>Ping_PID</c> in
- the <c>receive</c> previously explained).</p>
+ the <c>receive</c> previously explained.)</p>
<p>"Ping" now waits for a reply from "pong":</p>
<code type="none">
receive
@@ -364,7 +364,7 @@ pong ! {ping, self()},</code>
<c>.erlang.cookie</c> in the directory you get to after executing
the command <c>cd</c> without any argument).
The <c>.erlang.cookie</c> file should contain one line with
- the same atom. For example on Linux or Unix in the OS shell:</p>
+ the same atom. For example, on Linux or Unix in the OS shell:</p>
<pre>
$ <input>cd</input>
$ <input>cat > .erlang.cookie</input>
@@ -373,7 +373,7 @@ $ <input>chmod 400 .erlang.cookie</input></pre>
<p>The <c>chmod</c> above make the <c>.erlang.cookie</c> file
accessible only by the owner of the file. This is a requirement.</p>
<p>When you start an Erlang system which is going to talk to other
- Erlang systems, you must give it a name, eg: </p>
+ Erlang systems, you must give it a name, e.g.: </p>
<pre>
$ <input>erl -sname my_name</input></pre>
<p>We will see more details of this later. If you want to
@@ -385,7 +385,7 @@ $ <input>erl -sname my_name</input></pre>
IP domain and we can use only the first component of the IP
address, if we want to use nodes in different domains we use
<c>-name</c> instead, but then all IP address must be given in
- full.</p>
+ full.)</p>
<p>Here is the ping pong example modified to run on two separate
nodes:</p>
<code type="none">
@@ -538,7 +538,7 @@ ping finished</pre>
<p>Before we start, let's note the following:</p>
<list type="bulleted">
<item>
- <p>This example will just show the message passing logic- no
+ <p>This example will just show the message passing logic - no
attempt at all has been made to provide a nice graphical user
interface. This can, of course, also be done in Erlang - but
that's another tutorial.</p>
@@ -571,7 +571,7 @@ ping finished</pre>
%%% already logged in at the same node, login will be rejected
%%% with a suitable error message.
%%% logoff()
-%%% Logs off anybody at at node
+%%% Logs off anybody at that node
%%% message(ToName, Message)
%%% sends Message to ToName. Error messages if the user of this
%%% function is not logged on or if ToName is not logged on at
diff --git a/system/doc/getting_started/records_macros.xml b/system/doc/getting_started/records_macros.xml
index 76e3d185fb..73c8ce5c8d 100644
--- a/system/doc/getting_started/records_macros.xml
+++ b/system/doc/getting_started/records_macros.xml
@@ -97,7 +97,7 @@
%%% with a suitable error message.
%%% logoff()
-%%% Logs off anybody at at node
+%%% Logs off anybody at that node
%%% message(ToName, Message)
%%% sends Message to ToName. Error messages if the user of this
@@ -284,7 +284,7 @@ server_transfer(From, Name, To, Message, User_List) ->
the record is referred to. If you leave out a field when creating
a record, it will get the value of the atom undefined. (*manual*)</p>
<p>Pattern matching with records is very similar to creating
- records. For example inside a <c>case</c> or <c>receive</c>:</p>
+ records. For example, inside a <c>case</c> or <c>receive</c>:</p>
<code type="none">
#message_to{to_name=ToName, message=Message} -></code>
<p>is the same as:</p>
diff --git a/system/doc/getting_started/robustness.xml b/system/doc/getting_started/robustness.xml
index 359ea93275..e8fb81d5e8 100644
--- a/system/doc/getting_started/robustness.xml
+++ b/system/doc/getting_started/robustness.xml
@@ -30,7 +30,7 @@
</header>
<p>There are several things which are wrong with
the <seealso marker="conc_prog#ex">messenger example</seealso> from
- the previous chapter. For example if a node where a user is logged
+ the previous chapter. For example, if a node where a user is logged
on goes down without doing a log off, the user will remain in
the server's <c>User_List</c> but the client will disappear thus
making it impossible for the user to log on again as the server
@@ -214,9 +214,9 @@ Ping received pong</pre>
signal to be sent to "pong" which will also terminate.</p>
<p>It is possible to modify the default behaviour of a process so
that it does not get killed when it receives abnormal exit
- signals, but all signals will be turned into normal messages on
+ signals, but all signals will be turned into normal messages with
the format <c>{'EXIT',FromPID,Reason}</c> and added to the end of
- the receiving processes message queue. This behaviour is set by:</p>
+ the receiving process' message queue. This behaviour is set by:</p>
<code type="none">
process_flag(trap_exit, true)</code>
<p>There are several other process flags, see
@@ -289,7 +289,7 @@ pong exiting, got {'EXIT',&lt;3820.39.0>,ping}</pre>
%%% already logged in at the same node, login will be rejected
%%% with a suitable error message.
%%% logoff()
-%%% Logs off anybody at at node
+%%% Logs off anybody at that node
%%% message(ToName, Message)
%%% sends Message to ToName. Error messages if the user of this
%%% function is not logged on or if ToName is not logged on at
diff --git a/system/doc/getting_started/seq_prog.xml b/system/doc/getting_started/seq_prog.xml
index be43e8d896..699b9487ed 100644
--- a/system/doc/getting_started/seq_prog.xml
+++ b/system/doc/getting_started/seq_prog.xml
@@ -62,7 +62,7 @@ Eshell V5.9.1 (abort with ^G)
(See the chapter <seealso marker="erts:tty">"tty - A command line interface"</seealso> in ERTS User's Guide).</p>
<p>(Note: you will find a lot of line numbers given by the shell
out of sequence in this tutorial as it was written and the code
- tested in several sessions).</p>
+ tested in several sessions.)</p>
<p>Now let's try a more complex calculation.</p>
<pre>
2> <input>(42 + 77) * 66 / 3.</input>
@@ -115,7 +115,7 @@ double(X) ->
entered and there will also be error messages to give you some
idea as to what has gone wrong so you can change what you have
written and try again.</p>
- <p>Now lets run the program.</p>
+ <p>Now let's run the program.</p>
<pre>
4> <input>tut:double(10).</input>
20</pre>
@@ -208,7 +208,7 @@ mult(X, Y) ->
called variables. Variables must start with a capital letter
(see the chapter
<seealso marker="doc/reference_manual:expressions">"Variables"</seealso>
- in the Erlang Reference Manual). Examples of variable could be
+ in the Erlang Reference Manual). Examples of variables could be
<c>Number</c>, <c>ShoeSize</c>, <c>Age</c> etc.</p>
</section>
@@ -271,7 +271,7 @@ convert(N, centimeter) ->
Consider:</p>
<code type="none">
tut2:convert(3, inch).</code>
- <p>Does this mean that 3 is in inches? or that 3 is in centimeters
+ <p>Does this mean that 3 is in inches? Or that 3 is in centimeters
and we want to convert it to inches? So Erlang has a way to group
things together to make things more understandable. We call these
<em>tuples</em>. Tuples are surrounded by "{" and "}".</p>
@@ -309,7 +309,7 @@ convert_length({inch, Y}) ->
<p>We have shown tuples with two parts above, but tuples can have
as many parts as we want and contain any valid Erlang
<em>term</em>. For example, to represent the temperature of
- various cities of the world we could write</p>
+ various cities of the world we could write:</p>
<code type="none">
{moscow, {c, -10}}
{cape_town, {f, 70}}
@@ -325,7 +325,7 @@ convert_length({inch, Y}) ->
<title>Lists</title>
<p>Whereas tuples group things together, we also want to be able to
represent lists of things. Lists in Erlang are surrounded by "["
- and "]". For example a list of the temperatures of various cities
+ and "]". For example, a list of the temperatures of various cities
in the world could be:</p>
<code type="none">
[{moscow, {c, -10}}, {cape_town, {f, 70}}, {stockholm, {c, -4}},
@@ -345,7 +345,7 @@ convert_length({inch, Y}) ->
[2,3,4,5]</pre>
<p>We use | to separate the first elements of the list from
the rest of the list. (<c>First</c> has got value 1 and
- <c>TheRest</c> value [2,3,4,5]).</p>
+ <c>TheRest</c> value [2,3,4,5].)</p>
<p>Another example:</p>
<pre>
20> <input>[E1, E2 | R] = [1,2,3,4,5,6,7].</input>
@@ -403,7 +403,7 @@ list_length([First | Rest]) ->
the remaining elements <c>Rest</c> is 1 + the length of
<c>Rest</c>.</p>
<p>(Advanced readers only: This is not tail recursive, there is a
- better way to write this function).</p>
+ better way to write this function.)</p>
<p>In general we can say we use tuples where we would use "records"
or "structs" in other languages and we use lists when we want to
represent things which have varying sizes, (i.e. where we would
@@ -573,10 +573,9 @@ http://www.erlang.org/doc/r9b/doc/index.html</code>
<section>
<title>Writing Output to a Terminal</title>
<p>It's nice to be able to do formatted output in these example, so
- the next example shows a simple way to use to use
- the <c>io:format</c> function. Of course, just like all other
- exported functions, you can test the <c>io:format</c> function in
- the shell:</p>
+ the next example shows a simple way to use the <c>io:format</c>
+ function. Of course, just like all other exported functions, you
+ can test the <c>io:format</c> function in the shell:</p>
<pre>
31> <input>io:format("hello world~n", []).</input>
hello world
@@ -673,7 +672,7 @@ ok</pre>
<p>Now we call <c>format_temps(Rest)</c> with the rest of the list
as an argument. This way of doing things is similar to the loop
constructs in other languages. (Yes, this is recursion, but don't
- let that worry you). So the same <c>format_temps</c> function is
+ let that worry you.) So the same <c>format_temps</c> function is
called again, this time <c>City</c> gets the value
<c>{cape_town,{f,70}}</c> and we repeat the same procedure as
before. We go on doing this until the list becomes empty, i.e. [],
@@ -737,12 +736,12 @@ list_max([Head|Rest], Result_so_far) ->
the next part of the function.</p>
<p>Some useful operators in guards are, &lt; less than, &gt;
greater than, == equal, &gt;= greater or equal, =&lt; less or
- equal, /= not equal. (see the chapter
- <seealso marker="doc/reference_manual:expressions">"Guard Sequences"</seealso> in the Erlang Reference Manual).</p>
+ equal, /= not equal. (See the chapter
+ <seealso marker="doc/reference_manual:expressions">"Guard Sequences"</seealso> in the Erlang Reference Manual.)</p>
<p>To change the above program to one which works out the minimum
value of the element in a list, all we would need to do is to
write &lt; instead of &gt;. (But it would be wise to change
- the name of the function to <c>list_min</c> :-).</p>
+ the name of the function to <c>list_min</c> :-).)</p>
<p>Remember that I mentioned earlier that a variable could only be
given a value once in its scope? In the above we see, for example,
that <c>Result_so_far</c> has been given several values. This is
@@ -846,7 +845,7 @@ reverse([], [3,2,1]) =>
write a list manipulating function it is a good idea to check
that one isn't already written for you. (see
<seealso marker="stdlib:lists">lists(3)</seealso>).</p>
- <p>Now lets get back to the cities and temperatures, but take a more
+ <p>Now let's get back to the cities and temperatures, but take a more
structured approach this time. First let's convert the whole list
to Celsius as follows and test the function:</p>
<code type="none">
@@ -890,7 +889,7 @@ format_temps(List_of_cities) ->
<code type="none">
[City | convert_list_to_c(Rest)];</code>
<p>We go on doing this until we get to the end of the list (i.e.
- the list is empty:</p>
+ the list is empty):</p>
<code type="none">
convert_list_to_c([]) ->
[].</code>
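Putting the fragments above together, a plausible (not verbatim) convert_list_to_c/1 looks like this; the trunc/1 call and the pass-through clause are reconstructions:

    %% Fahrenheit entries are converted to Celsius, anything else is
    %% passed through, and the recursion stops at the empty list.
    convert_list_to_c([{Name, {f, Temp}} | Rest]) ->
        Converted_City = {Name, {c, trunc((Temp - 32) * 5 / 9)}},
        [Converted_City | convert_list_to_c(Rest)];
    convert_list_to_c([City | Rest]) ->
        [City | convert_list_to_c(Rest)];
    convert_list_to_c([]) ->
        [].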
@@ -1187,11 +1186,11 @@ trunc(5.0) = 5
<pre>
74> <input>2004 rem 400.</input>
4</pre>
- <p>so instead of writing</p>
+ <p>so instead of writing:</p>
<code type="none">
trunc(Year / 400) * 400 == Year ->
leap;</code>
- <p>we could write</p>
+ <p>we could write:</p>
<code type="none">
Year rem 400 == 0 ->
leap;</code>
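The rem guard generalises to a complete leap-year test; a minimal sketch, in which the function name year_type/1 and the if expression are assumptions rather than quotes from the chapter:

    -module(year).
    -export([year_type/1]).

    %% Years divisible by 400 are leap years, other centuries are not,
    %% and the remaining multiples of 4 are leap years.
    year_type(Year) ->
        if
            Year rem 400 == 0 -> leap;
            Year rem 100 == 0 -> not_leap;
            Year rem 4   == 0 -> leap;
            true              -> not_leap
        end.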
@@ -1201,7 +1200,7 @@ Year rem 400 == 0 ->
(see the chapter
<seealso marker="doc/reference_manual:expressions">"Guard Sequences"</seealso> in the Erlang Reference Manual) (Aside for
advanced readers: This is to ensure that guards don't have side
- effects). Let's play with a few of these functions in the shell:</p>
+ effects.) Let's play with a few of these functions in the shell:</p>
<pre>
75> <input>trunc(5.6).</input>
5
@@ -1268,7 +1267,7 @@ map(Fun, []) ->
#Fun&lt;erl_eval.5.123085357&gt;
89> <input>lists:map(Add_3, [1,2,3]).</input>
[4,5,6]</pre>
- <p>Now lets print out the temperatures in a list of cities (yet
+ <p>Now let's print out the temperatures in a list of cities (yet
again):</p>
<pre>
90> <input>Print_City = fun({City, {X, Temp}}) -> io:format("~-15w ~w ~w~n",</input>
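The shell line above is truncated in this hunk; as a hedged stand-in, a fun passed to lists:foreach/2 can print each city in one go (the fun body is inferred from the fun head and may differ from the original):

    %% Bind a printing fun, then apply it to every element of the list.
    Print_City = fun({City, {X, Temp}}) ->
                     io:format("~-15w ~w ~w~n", [City, X, Temp])
                 end,
    lists:foreach(Print_City,
                  [{moscow, {c, -10}}, {cape_town, {f, 70}}, {stockholm, {c, -4}}]).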
diff --git a/system/doc/programming_examples/fun_test.erl b/system/doc/programming_examples/fun_test.erl
index 8472fd87f8..8a3b0106c0 100644
--- a/system/doc/programming_examples/fun_test.erl
+++ b/system/doc/programming_examples/fun_test.erl
@@ -1,17 +1,11 @@
%1
-module(fun_test).
--export([t1/0, t2/0, t3/0, t4/0, double/1]).
+-export([t1/0, t2/0]).
-import(lists, [map/2]).
t1() -> map(fun(X) -> 2 * X end, [1,2,3,4,5]).
t2() -> map(fun double/1, [1,2,3,4,5]).
-t3() -> map({?MODULE, double}, [1,2,3,4,5]).
-
double(X) -> X * 2.
%1
-
-
-t4() ->
- "hello world".
diff --git a/system/doc/reference_manual/modules.xml b/system/doc/reference_manual/modules.xml
index 5fc8b363f8..5cb0c11371 100644
--- a/system/doc/reference_manual/modules.xml
+++ b/system/doc/reference_manual/modules.xml
@@ -194,8 +194,7 @@ behaviour_info(callbacks) -> Callbacks.</pre>
</p>
<pre>
-type my_type() :: atom() | integer().
--spec my_function(integer()) -> integer().
- </pre>
+-spec my_function(integer()) -> integer().</pre>
<p>Read more in <seealso marker="typespec">Types and Function specifications</seealso>.
</p>
<p>
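In context, the two attributes above might sit in a module like the following minimal sketch; the module name, the export of my_type/0, and my_function's body are illustrative:

    -module(my_module).
    -export([my_function/1]).
    -export_type([my_type/0]).

    -type my_type() :: atom() | integer().

    -spec my_function(integer()) -> integer().
    my_function(N) ->
        N + 1.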
diff --git a/system/doc/reference_manual/typespec.xml b/system/doc/reference_manual/typespec.xml
index e4aa2ceda6..d1584d2b98 100644
--- a/system/doc/reference_manual/typespec.xml
+++ b/system/doc/reference_manual/typespec.xml
@@ -264,8 +264,10 @@
its violation results in a compilation error.
</p>
<note>
- The following built-in list types also exist,
- but they are expected to be rarely used. Hence, they have long names:
+ <p>
+ The following built-in list types also exist,
+ but they are expected to be rarely used. Hence, they have long names:
+ </p>
</note>
<pre>
nonempty_maybe_improper_list() :: nonempty_maybe_improper_list(any(), any())
@@ -504,7 +506,9 @@
-spec foo({X, integer()}) -> X when X :: atom()
; ([Y]) -> Y when Y :: number().</pre>
<note>
+ <p>
For backwards compatibility the following form is also allowed:
+ </p>
<pre> -spec id(X) -> X when is_subtype(X, tuple()).</pre>
<p>
but its use is discouraged. It will be taken out in a future
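A hedged example of the preferred constraint syntax that replaces the deprecated is_subtype/2 form; the module wrapper is illustrative:

    -module(id_example).
    -export([id/1]).

    %% `X :: tuple()' is the recommended way to write the constraint
    %% that the old form spelled as is_subtype(X, tuple()).
    -spec id(X) -> X when X :: tuple().
    id(X) ->
        X.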
diff --git a/system/doc/system_principles/system_principles.xml b/system/doc/system_principles/system_principles.xml
index 70c69b1dab..79ed86cd9f 100644
--- a/system/doc/system_principles/system_principles.xml
+++ b/system/doc/system_principles/system_principles.xml
@@ -109,7 +109,7 @@ init:stop()</pre>
<p>Loads the code for and starts the applications Kernel and
STDLIB, skips loading the <c>.erlang</c> file.
Useful for scripts and other tools that should behave the
- same irregardless of user preferences.
+ same regardless of user preferences.
</p>
</item>
</taglist>
diff --git a/xcomp/erl-xcomp-powerpc-dso-linux-gnu.conf b/xcomp/erl-xcomp-powerpc-dso-linux-gnu.conf
index c245b493a5..cc6c5a9678 100644
--- a/xcomp/erl-xcomp-powerpc-dso-linux-gnu.conf
+++ b/xcomp/erl-xcomp-powerpc-dso-linux-gnu.conf
@@ -55,7 +55,7 @@ erl_xcomp_build=guess
# It does not have to be a full `CPU-VENDOR-OS' triplet, but can be. The
# full `CPU-VENDOR-OS' triplet will be created by
# `$ERL_TOP/erts/autoconf/config.sub $erl_xcomp_host'.
-erl_xcomp_host=powerpc-linux-gnu
+erl_xcomp_host=powerpc-wrs-linux-gnu
# * `erl_xcomp_configure_flags' - Extra configure flags to pass to the
# `configure' script.
@@ -70,10 +70,10 @@ erl_xcomp_configure_flags="--without-termcap"
## All variables in this section can also be used when native compiling.
# * `CC' - C compiler.
-CC=powerpc-linux-gnu-gcc
+CC=powerpc-wrs-linux-gnu-gcc
# * `CFLAGS' - C compiler flags.
-#CFLAGS=
+CFLAGS="-O2 --sysroot=/ldisk/cross/gcc-toolchain/sysroot -Wall -g"
# * `STATIC_CFLAGS' - Static C compiler flags.
#STATIC_CFLAGS=
@@ -84,22 +84,22 @@ CC=powerpc-linux-gnu-gcc
#CFLAG_RUNTIME_LIBRARY_PATH=
# * `CPP' - C pre-processor.
-#CPP=
+CPP="powerpc-wrs-linux-gnu-cpp "
# * `CPPFLAGS' - C pre-processor flags.
-#CPPFLAGS=
+CPPFLAGS="--sysroot=/ldisk/cross/gcc-toolchain/sysroot"
# * `CXX' - C++ compiler.
-CXX=powerpc-linux-gnu-g++
+CXX="powerpc-wrs-linux-gnu-g++"
# * `CXXFLAGS' - C++ compiler flags.
-#CXXFLAGS=
+CXXFLAGS="--sysroot=/ldisk/cross/gcc-toolchain/sysroot"
# * `LD' - Linker.
-LD=powerpc-linux-gnu-ld
+LD="powerpc-wrs-linux-gnu-gcc"
# * `LDFLAGS' - Linker flags.
-#LDFLAGS=
+LDFLAGS="--sysroot=/ldisk/cross/gcc-toolchain/sysroot"
# * `LIBS' - Libraries.
#LIBS=
@@ -109,14 +109,14 @@ LD=powerpc-linux-gnu-ld
## *NOTE*! Either set all or none of the `DED_LD*' variables.
# * `DED_LD' - Linker for Dynamically loaded Erlang Drivers.
-#DED_LD=
+DED_LD="powerpc-wrs-linux-gnu-gcc"
# * `DED_LDFLAGS' - Linker flags to use with `DED_LD'.
-#DED_LDFLAGS=
+DED_LDFLAGS="--sysroot=/ldisk/cross/gcc-toolchain/sysroot -shared -Wl,-Bsymbolic"
# * `DED_LD_FLAG_RUNTIME_LIBRARY_PATH' - This flag should set runtime library
# search path for shared libraries when linking with `DED_LD'.
-#DED_LD_FLAG_RUNTIME_LIBRARY_PATH=
+DED_LD_FLAG_RUNTIME_LIBRARY_PATH="-Wl,-R"
## -- Large File Support --
@@ -134,10 +134,10 @@ LD=powerpc-linux-gnu-ld
## -- Other Tools --
# * `RANLIB' - `ranlib' archive index tool.
-RANLIB=powerpc-linux-gnu-ranlib
+RANLIB=powerpc-wrs-linux-gnu-ranlib
# * `AR' - `ar' archiving tool.
-AR=powerpc-linux-gnu-ar
+AR=powerpc-wrs-linux-gnu-ar
# * `GETCONF' - `getconf' system configuration inspection tool. `getconf' is
# currently used for finding out large file support flags to use, and