Diffstat (limited to 'lib')
-rw-r--r--lib/asn1/src/Makefile1
-rw-r--r--lib/asn1/src/asn1_db.erl8
-rw-r--r--lib/asn1/src/asn1_records.hrl16
-rw-r--r--lib/asn1/src/asn1ct.erl71
-rw-r--r--lib/asn1/src/asn1ct_check.erl5271
-rw-r--r--lib/asn1/src/asn1ct_constructed_ber_bin_v2.erl13
-rw-r--r--lib/asn1/src/asn1ct_constructed_per.erl155
-rw-r--r--lib/asn1/src/asn1ct_gen.erl75
-rw-r--r--lib/asn1/src/asn1ct_gen_ber_bin_v2.erl109
-rw-r--r--lib/asn1/src/asn1ct_imm.erl9
-rw-r--r--lib/asn1/src/asn1ct_parser.yrl1177
-rw-r--r--lib/asn1/src/asn1ct_parser2.erl2517
-rw-r--r--lib/asn1/src/asn1ct_tok.erl332
-rw-r--r--lib/asn1/test/Makefile10
-rw-r--r--lib/asn1/test/asn1_SUITE.erl120
-rw-r--r--lib/asn1/test/asn1_SUITE_data/BadTypeEnding.asn6
-rw-r--r--lib/asn1/test/asn1_SUITE_data/BadValueAssignment1.asn18
-rw-r--r--lib/asn1/test/asn1_SUITE_data/BadValueAssignment2.asn18
-rw-r--r--lib/asn1/test/asn1_SUITE_data/BadValueSet.asn19
-rw-r--r--lib/asn1/test/asn1_SUITE_data/CCSNARG3.asn2
-rw-r--r--lib/asn1/test/asn1_SUITE_data/ChoExtension.asn16
-rw-r--r--lib/asn1/test/asn1_SUITE_data/ChoiceBadExtension.asn127
-rw-r--r--lib/asn1/test/asn1_SUITE_data/ConstraintEquivalence.asn160
-rw-r--r--lib/asn1/test/asn1_SUITE_data/Constraints.py44
-rw-r--r--lib/asn1/test/asn1_SUITE_data/CoverParser.asn157
-rw-r--r--lib/asn1/test/asn1_SUITE_data/DoubleEllipses.asn19
-rw-r--r--lib/asn1/test/asn1_SUITE_data/EnumExt.asn12
-rw-r--r--lib/asn1/test/asn1_SUITE_data/Example.asn120
-rw-r--r--lib/asn1/test/asn1_SUITE_data/Export1.asn7
-rw-r--r--lib/asn1/test/asn1_SUITE_data/Exporting.asn118
-rw-r--r--lib/asn1/test/asn1_SUITE_data/ExtensibilityImplied.asn130
-rw-r--r--lib/asn1/test/asn1_SUITE_data/IllegalExport.asn17
-rw-r--r--lib/asn1/test/asn1_SUITE_data/Importing.asn120
-rw-r--r--lib/asn1/test/asn1_SUITE_data/InfObj.asn188
-rw-r--r--lib/asn1/test/asn1_SUITE_data/InfObjExtract.asn1136
-rw-r--r--lib/asn1/test/asn1_SUITE_data/MissingEnd.asn15
-rw-r--r--lib/asn1/test/asn1_SUITE_data/ObjIdValues.asn11
-rw-r--r--lib/asn1/test/asn1_SUITE_data/ParamBasic.asn133
-rw-r--r--lib/asn1/test/asn1_SUITE_data/Prim.asn12
-rw-r--r--lib/asn1/test/asn1_SUITE_data/SelectionType.asn8
-rw-r--r--lib/asn1/test/asn1_SUITE_data/Seq.py5
-rw-r--r--lib/asn1/test/asn1_SUITE_data/SeqOptional2.asn6
-rw-r--r--lib/asn1/test/asn1_SUITE_data/SequenceBadComma.asn10
-rw-r--r--lib/asn1/test/asn1_SUITE_data/SequenceBadComponentName.asn110
-rw-r--r--lib/asn1/test/asn1_SUITE_data/SequenceBadComponentType.asn110
-rw-r--r--lib/asn1/test/asn1_SUITE_data/Syntax.py10
-rw-r--r--lib/asn1/test/asn1_SUITE_data/ValueTest.asn96
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/ACSE-1.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/ACSE-1.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/AlgorithmInformation-2009.asn1466
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/AttributeCertificateVersion1-2009.asn159
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/AuthenticationFramework.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/AuthenticationFramework.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/BasicAccessControl.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/BasicAccessControl.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/CertificateExtensions.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/CertificateExtensions.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Character-Coding-Attributes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Character-Coding-Attributes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Character-Presentation-Attributes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Character-Presentation-Attributes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Character-Profile-Attributes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Character-Profile-Attributes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Colour-Attributes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Colour-Attributes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/CryptographicMessageSyntax-2009.asn1463
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/CryptographicMessageSyntaxAlgorithms-2009.asn1248
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/DOR-definition.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/DOR-definition.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/DSAOperationalAttributeTypes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/DSAOperationalAttributeTypes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Default-Value-Lists.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Default-Value-Lists.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryAbstractService.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/DirectoryAbstractService.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryAccessProtocol.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/DirectoryAccessProtocol.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryInformationShadowProtocol.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/DirectoryInformationShadowProtocol.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryOperationalBindingManagementProtocol.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/DirectoryOperationalBindingManagementProtocol.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryOperationalBindingTypes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/DirectoryOperationalBindingTypes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryProtectionMappings.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/DirectoryProtectionMappings.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryShadowAbstractService.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/DirectoryShadowAbstractService.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/DirectorySystemProtocol.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/DirectorySystemProtocol.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/DistributedOperations.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/DistributedOperations.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Document-Profile-Descriptor.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Document-Profile-Descriptor.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/EnhancedSecurity.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/EnhancedSecurity.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/EnrollmentMessageSyntax-2009.asn1543
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/External-References.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/External-References.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/GULSProtectionMappings.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/GULSProtectionMappings.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/GenericProtectingTransferSyntax.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/GenericProtectingTransferSyntax.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Geo-Gr-Coding-Attributes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Geo-Gr-Coding-Attributes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Geo-Gr-Presentation-Attributes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Geo-Gr-Presentation-Attributes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Geo-Gr-Profile-Attributes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Geo-Gr-Profile-Attributes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/GulsSecurityExchanges.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/GulsSecurityExchanges.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/GulsSecurityTransformations.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/GulsSecurityTransformations.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/HierarchicalOperationalBindings.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/HierarchicalOperationalBindings.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSAbstractService.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSAbstractService.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSAutoActionTypes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSAutoActionTypes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSExtendedBodyPartTypes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSExtendedBodyPartTypes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSExtendedBodyPartTypes2.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSExtendedBodyPartTypes2.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSExtendedVoiceBodyPartType.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSExtendedVoiceBodyPartType.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSFileTransferBodyPartType.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSFileTransferBodyPartType.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSForwardedContentBodyPartType.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSForwardedContentBodyPartType.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSForwardedReportBodyPartType.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSForwardedReportBodyPartType.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSFunctionalObjects.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSFunctionalObjects.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSHeadingExtensions.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSHeadingExtensions.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSInformationObjects.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSInformationObjects.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSMessageStoreAttributes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSMessageStoreAttributes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSObjectIdentifiers.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSObjectIdentifiers.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSObjectIdentifiers2.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSObjectIdentifiers2.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSSecurityExtensions.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSSecurityExtensions.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSUpperBounds.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSUpperBounds.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/ISO-STANDARD-9541-FONT-ATTRIBUTE-SET.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/ISO-STANDARD-9541-FONT-ATTRIBUTE-SET.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/ISO8571-FTAM.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/ISO8571-FTAM.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/ISO9541-SN.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/ISO9541-SN.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Identifiers-and-Expressions.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Identifiers-and-Expressions.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/InformationFramework.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/InformationFramework.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Interchange-Data-Elements.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Interchange-Data-Elements.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Layout-Descriptors.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Layout-Descriptors.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Link-Descriptors.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Link-Descriptors.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Location-Expressions.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Location-Expressions.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Logical-Descriptors.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Logical-Descriptors.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MHSObjectIdentifiers.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MHSObjectIdentifiers.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MHSProtocolObjectIdentifiers.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MHSProtocolObjectIdentifiers.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MSAbstractService.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MSAbstractService.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MSAccessProtocol.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MSAccessProtocol.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MSGeneralAttributeTypes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MSGeneralAttributeTypes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MSGeneralAutoActionTypes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MSGeneralAutoActionTypes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MSMatchingRules.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MSMatchingRules.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MSObjectIdentifiers.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MSObjectIdentifiers.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MSUpperBounds.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MSUpperBounds.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MTAAbstractService.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MTAAbstractService.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MTSAbstractService.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MTSAbstractService.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MTSAbstractService88.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MTSAbstractService88.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MTSAccessProtocol.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MTSAccessProtocol.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MTSObjectIdentifiers.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MTSObjectIdentifiers.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MTSUpperBounds.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MTSUpperBounds.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Notation.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Notation.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/OCSP-2009.asn1183
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/OLD-PKCS7.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/PKCS7.asn)2
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/ObjectIdentifiers.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/ObjectIdentifiers.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/OperationalBindingManagement.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/OperationalBindingManagement.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-10.asn156
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-12.asn1174
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-5.asn1202
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-7.asn1326
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-8.asn161
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-9.asn1391
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKCS7BodyPartType.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/PKCS7BodyPartType.asn)2
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKIX-CommonTypes-2009.asn1166
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKIX-X400Address-2009.asn1300
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1-PSS-OAEP-Algorithms-2009.asn1308
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1Explicit-2009.asn1415
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1Implicit-2009.asn1447
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKIXAlgs-2009.asn1528
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKIXAttributeCertificate-2009.asn1292
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKIXCMP-2009.asn1495
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKIXCRMF-2009.asn1409
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Protected-Part-Descriptors.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Protected-Part-Descriptors.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/ProtocolObjectIdentifiers.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/ProtocolObjectIdentifiers.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Raster-Gr-Coding-Attributes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Raster-Gr-Coding-Attributes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Raster-Gr-Presentation-Attributes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Raster-Gr-Presentation-Attributes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Raster-Gr-Profile-Attributes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Raster-Gr-Profile-Attributes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Reliable-Transfer-APDU.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Reliable-Transfer-APDU.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Abstract-Syntaxes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Abstract-Syntaxes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Generic-ROS-PDUs.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Generic-ROS-PDUs.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Information-Objects-extensions.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Information-Objects-extensions.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Information-Objects.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Information-Objects.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Realizations.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Realizations.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Useful-Definitions.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Useful-Definitions.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/SCVP-2009.asn1608
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/SecureMimeMessageV3dot1-2009.asn1122
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/SelectedAttributeTypes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/SelectedAttributeTypes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/SeseAPDUs.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/SeseAPDUs.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/SpkmGssTokens.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/SpkmGssTokens.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Style-Descriptors.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Style-Descriptors.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Subprofiles.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Subprofiles.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Temporal-Relationships.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Temporal-Relationships.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Text-Units.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Text-Units.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/UpperBounds.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/UpperBounds.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/UsefulDefinitions.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/UsefulDefinitions.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Videotex-Coding-Attributes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Videotex-Coding-Attributes.asn)0
-rw-r--r--lib/asn1/test/asn1_test_lib.erl25
-rw-r--r--lib/asn1/test/error_SUITE.erl725
-rw-r--r--lib/asn1/test/syntax_SUITE.erl340
-rw-r--r--lib/asn1/test/testChoExtension.erl5
-rw-r--r--lib/asn1/test/testConstraints.erl22
-rw-r--r--lib/asn1/test/testDoubleEllipses.erl6
-rw-r--r--lib/asn1/test/testEnumExt.erl28
-rw-r--r--lib/asn1/test/testExtensibilityImplied.erl29
-rw-r--r--lib/asn1/test/testImporting.erl34
-rw-r--r--lib/asn1/test/testInfObj.erl39
-rw-r--r--lib/asn1/test/testInfObjExtract.erl72
-rw-r--r--lib/asn1/test/testParamBasic.erl8
-rw-r--r--lib/asn1/test/testPrim.erl5
-rw-r--r--lib/asn1/test/testRfcs.erl75
-rw-r--r--lib/asn1/test/testSelectionTypes.erl28
-rw-r--r--lib/asn1/test/testUniqueObjectSets.erl175
-rw-r--r--lib/asn1/test/testValueTest.erl114
-rw-r--r--lib/asn1/test/testX420.erl93
-rw-r--r--lib/asn1/test/test_compile_options.erl39
-rw-r--r--lib/common_test/priv/Makefile.in2
-rw-r--r--lib/common_test/src/ct_cover.erl127
-rw-r--r--lib/common_test/src/ct_framework.erl6
-rw-r--r--lib/common_test/src/ct_logs.erl31
-rw-r--r--lib/common_test/src/ct_release_test.erl137
-rw-r--r--lib/common_test/src/ct_run.erl24
-rw-r--r--lib/common_test/src/ct_telnet.erl3
-rw-r--r--lib/common_test/test/ct_cover_SUITE.erl79
-rw-r--r--lib/common_test/test/ct_cover_SUITE_data/cover_SUITE.erl4
-rw-r--r--lib/common_test/test/ct_event_handler_SUITE.erl11
-rw-r--r--lib/common_test/test/ct_event_handler_SUITE_data/event_handling_1/test/eh_11_SUITE.erl9
-rw-r--r--lib/common_test/test/ct_hooks_SUITE.erl32
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_cth_fail_one_skip_one_SUITE.erl9
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/empty_cth.erl15
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/minimal_terminate_cth.erl4
-rw-r--r--lib/common_test/test/ct_telnet_SUITE_data/ct_telnet_own_server_SUITE.erl26
-rw-r--r--lib/compiler/src/Makefile2
-rw-r--r--lib/compiler/src/beam_a.erl3
-rw-r--r--lib/compiler/src/beam_block.erl6
-rw-r--r--lib/compiler/src/beam_bool.erl75
-rw-r--r--lib/compiler/src/beam_clean.erl25
-rw-r--r--lib/compiler/src/beam_dead.erl676
-rw-r--r--lib/compiler/src/beam_flatten.erl4
-rw-r--r--lib/compiler/src/beam_jump.erl63
-rw-r--r--lib/compiler/src/beam_split.erl5
-rw-r--r--lib/compiler/src/beam_type.erl32
-rw-r--r--lib/compiler/src/beam_utils.erl43
-rw-r--r--lib/compiler/src/beam_validator.erl330
-rw-r--r--lib/compiler/src/beam_z.erl14
-rw-r--r--lib/compiler/src/cerl.erl59
-rw-r--r--lib/compiler/src/cerl_clauses.erl46
-rw-r--r--lib/compiler/src/compiler.app.src1
-rw-r--r--lib/compiler/src/core_lib.erl64
-rw-r--r--lib/compiler/src/core_lint.erl2
-rw-r--r--lib/compiler/src/core_parse.hrl3
-rw-r--r--lib/compiler/src/core_parse.yrl98
-rw-r--r--lib/compiler/src/core_pp.erl22
-rw-r--r--lib/compiler/src/core_scan.erl6
-rw-r--r--lib/compiler/src/erl_bifs.erl1
-rw-r--r--lib/compiler/src/sys_core_fold.erl1108
-rw-r--r--lib/compiler/src/sys_core_fold_lists.erl386
-rw-r--r--lib/compiler/src/sys_core_inline.erl8
-rw-r--r--lib/compiler/src/v3_codegen.erl16
-rw-r--r--lib/compiler/src/v3_core.erl328
-rw-r--r--lib/compiler/src/v3_kernel.erl152
-rw-r--r--lib/compiler/test/Makefile3
-rw-r--r--lib/compiler/test/andor_SUITE.erl10
-rw-r--r--lib/compiler/test/beam_utils_SUITE.erl236
-rw-r--r--lib/compiler/test/beam_validator_SUITE.erl212
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/bad_dsetel.S6
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/bin_aligned.S2
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/bin_match.S64
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/compiler_bug.S38
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/dead_code.S25
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/erl_prim_loader.beambin17460 -> 0 bytes
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/freg_range.S4
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/freg_state.S2
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/freg_uninit.S14
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/illegal_instruction.S26
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/map_field_lists.S29
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/merge_undefined.S3
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/no_exception_in_catch.S4
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/stack.S4
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/undef_label.S22
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/uninit.S16
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/xrange.S4
-rw-r--r--lib/compiler/test/bs_bit_binaries_SUITE.erl2
-rw-r--r--lib/compiler/test/bs_construct_SUITE.erl2
-rw-r--r--lib/compiler/test/bs_match_SUITE.erl52
-rw-r--r--lib/compiler/test/compilation_SUITE.erl6
-rw-r--r--lib/compiler/test/compile_SUITE.erl89
-rw-r--r--lib/compiler/test/compile_SUITE_data/dialyzer_test.erl39
-rw-r--r--lib/compiler/test/core_SUITE_data/map_core_test.core12
-rw-r--r--lib/compiler/test/core_fold_SUITE.erl73
-rw-r--r--lib/compiler/test/float_SUITE.erl2
-rw-r--r--lib/compiler/test/guard_SUITE.erl275
-rw-r--r--lib/compiler/test/lc_SUITE.erl102
-rw-r--r--lib/compiler/test/map_SUITE.erl11
-rw-r--r--lib/compiler/test/match_SUITE.erl38
-rw-r--r--lib/compiler/test/misc_SUITE.erl17
-rw-r--r--lib/compiler/test/receive_SUITE.erl38
-rw-r--r--lib/compiler/test/record_SUITE.erl8
-rw-r--r--lib/compiler/test/test_lib.erl23
-rw-r--r--lib/compiler/test/trycatch_SUITE.erl20
-rw-r--r--lib/compiler/test/warnings_SUITE.erl59
-rw-r--r--lib/crypto/c_src/crypto.c91
-rw-r--r--lib/crypto/doc/src/crypto.xml36
-rw-r--r--lib/crypto/src/crypto.erl34
-rw-r--r--lib/crypto/test/crypto_SUITE.erl209
-rw-r--r--lib/debugger/src/dbg_wx_settings.erl10
-rw-r--r--lib/dialyzer/doc/src/dialyzer.xml73
-rw-r--r--lib/dialyzer/src/dialyzer.erl8
-rw-r--r--lib/dialyzer/src/dialyzer.hrl12
-rw-r--r--lib/dialyzer/src/dialyzer_analysis_callgraph.erl156
-rw-r--r--lib/dialyzer/src/dialyzer_behaviours.erl18
-rw-r--r--lib/dialyzer/src/dialyzer_cl.erl21
-rw-r--r--lib/dialyzer/src/dialyzer_codeserver.erl32
-rw-r--r--lib/dialyzer/src/dialyzer_contracts.erl125
-rw-r--r--lib/dialyzer/src/dialyzer_dataflow.erl157
-rw-r--r--lib/dialyzer/src/dialyzer_options.erl4
-rw-r--r--lib/dialyzer/src/dialyzer_races.erl17
-rw-r--r--lib/dialyzer/src/dialyzer_succ_typings.erl46
-rw-r--r--lib/dialyzer/src/dialyzer_typesig.erl6
-rw-r--r--lib/dialyzer/src/dialyzer_utils.erl325
-rw-r--r--lib/dialyzer/test/race_SUITE_data/src/ets_insert_args1_suppressed.erl19
-rw-r--r--lib/dialyzer/test/small_SUITE_data/results/blame_contract_range_suppressed2
-rw-r--r--lib/dialyzer/test/small_SUITE_data/results/contracts_with_subtypes36
-rw-r--r--lib/dialyzer/test/small_SUITE_data/results/request12
-rw-r--r--lib/dialyzer/test/small_SUITE_data/results/suppress_request6
-rw-r--r--lib/dialyzer/test/small_SUITE_data/src/big_external_type.erl528
-rw-r--r--lib/dialyzer/test/small_SUITE_data/src/big_local_type.erl525
-rw-r--r--lib/dialyzer/test/small_SUITE_data/src/blame_contract_range_suppressed.erl15
-rw-r--r--lib/dialyzer/test/small_SUITE_data/src/contracts_with_subtypes.erl12
-rw-r--r--lib/dialyzer/test/small_SUITE_data/src/ditrap.erl47
-rw-r--r--lib/dialyzer/test/small_SUITE_data/src/maps_redef2.erl23
-rw-r--r--lib/dialyzer/test/small_SUITE_data/src/request1.erl12
-rw-r--r--lib/dialyzer/test/small_SUITE_data/src/suppress_request.erl50
-rw-r--r--lib/dialyzer/test/small_SUITE_data/src/suppression1.erl33
-rw-r--r--lib/dialyzer/test/small_SUITE_data/src/suppression2.erl32
-rw-r--r--lib/diameter/doc/src/diameter.xml73
-rw-r--r--lib/diameter/examples/code/GNUmakefile4
-rw-r--r--lib/diameter/examples/code/client.erl39
-rw-r--r--lib/diameter/examples/code/node.erl174
-rw-r--r--lib/diameter/examples/code/peer.erl150
-rw-r--r--lib/diameter/examples/code/relay.erl33
-rw-r--r--lib/diameter/examples/code/server.erl31
-rw-r--r--lib/diameter/examples/code/server_cb.erl4
-rw-r--r--lib/diameter/include/diameter_gen.hrl57
-rw-r--r--lib/diameter/src/base/diameter.erl3
-rw-r--r--lib/diameter/src/base/diameter_codec.erl21
-rw-r--r--lib/diameter/src/base/diameter_config.erl10
-rw-r--r--lib/diameter/src/base/diameter_lib.erl158
-rw-r--r--lib/diameter/src/base/diameter_peer.erl6
-rw-r--r--lib/diameter/src/base/diameter_reg.erl7
-rw-r--r--lib/diameter/src/base/diameter_service.erl93
-rw-r--r--lib/diameter/src/base/diameter_service_sup.erl4
-rw-r--r--lib/diameter/src/base/diameter_session.erl4
-rw-r--r--lib/diameter/src/base/diameter_stats.erl6
-rw-r--r--lib/diameter/src/base/diameter_sup.erl4
-rw-r--r--lib/diameter/src/base/diameter_sync.erl5
-rw-r--r--lib/diameter/src/base/diameter_traffic.erl35
-rw-r--r--lib/diameter/src/base/diameter_types.erl2
-rw-r--r--lib/diameter/src/base/diameter_watchdog.erl5
-rw-r--r--lib/diameter/src/modules.mk4
-rw-r--r--lib/diameter/src/transport/diameter_sctp.erl272
-rw-r--r--lib/diameter/src/transport/diameter_tcp.erl37
-rw-r--r--lib/diameter/src/transport/diameter_transport_sup.erl4
-rw-r--r--lib/diameter/test/diameter_app_SUITE.erl43
-rw-r--r--lib/diameter/test/diameter_capx_SUITE.erl6
-rw-r--r--lib/diameter/test/diameter_codec_SUITE.erl188
-rw-r--r--lib/diameter/test/diameter_codec_test.erl7
-rw-r--r--lib/diameter/test/diameter_ct.erl6
-rw-r--r--lib/diameter/test/diameter_event_SUITE.erl9
-rw-r--r--lib/diameter/test/diameter_examples_SUITE.erl12
-rw-r--r--lib/diameter/test/diameter_gen_sctp_SUITE.erl39
-rw-r--r--lib/diameter/test/diameter_gen_tcp_SUITE.erl67
-rw-r--r--lib/diameter/test/diameter_pool_SUITE.erl133
-rw-r--r--lib/diameter/test/diameter_traffic_SUITE.erl30
-rw-r--r--lib/diameter/test/diameter_transport_SUITE.erl83
-rw-r--r--lib/diameter/test/diameter_util.erl79
-rw-r--r--lib/diameter/test/diameter_watchdog_SUITE.erl6
-rw-r--r--lib/diameter/test/modules.mk4
-rw-r--r--lib/edoc/doc/overview.edoc23
-rw-r--r--lib/edoc/include/edoc_doclet.hrl11
-rw-r--r--lib/edoc/priv/edoc.dtd9
-rw-r--r--lib/edoc/priv/stylesheet.css4
-rw-r--r--lib/edoc/src/edoc.erl174
-rw-r--r--lib/edoc/src/edoc.hrl7
-rw-r--r--lib/edoc/src/edoc_data.erl33
-rw-r--r--lib/edoc/src/edoc_doclet.erl106
-rw-r--r--lib/edoc/src/edoc_extract.erl19
-rw-r--r--lib/edoc/src/edoc_layout.erl26
-rw-r--r--lib/edoc/src/edoc_lib.erl157
-rw-r--r--lib/edoc/src/edoc_macros.erl4
-rw-r--r--lib/edoc/src/edoc_parser.yrl10
-rw-r--r--lib/edoc/src/edoc_refs.erl72
-rw-r--r--lib/edoc/src/edoc_run.erl30
-rw-r--r--lib/edoc/src/edoc_tags.erl22
-rw-r--r--lib/edoc/src/otpsgml_layout.erl24
-rw-r--r--lib/edoc/test/edoc_SUITE.erl21
-rw-r--r--lib/edoc/test/edoc_SUITE_data/myapp/doc/.dummy0
-rw-r--r--lib/edoc/test/edoc_SUITE_data/myapp/src/a.erl1
-rw-r--r--lib/edoc/test/edoc_SUITE_data/myapp/src/src_1/b.erl1
-rw-r--r--lib/eldap/src/eldap.erl2
-rw-r--r--lib/erl_docgen/priv/bin/specs_gen.escript2
-rwxr-xr-xlib/erl_docgen/priv/bin/xml_from_edoc.escript2
-rw-r--r--lib/hipe/cerl/erl_bif_types.erl10
-rw-r--r--lib/hipe/cerl/erl_types.erl847
-rw-r--r--lib/hipe/ppc/hipe_rtl_to_ppc.erl14
-rw-r--r--lib/hipe/rtl/hipe_rtl.erl18
-rw-r--r--lib/hipe/sparc/hipe_rtl_to_sparc.erl10
-rw-r--r--lib/hipe/x86/hipe_rtl_to_x86.erl15
-rw-r--r--lib/inets/doc/src/http_uri.xml11
-rw-r--r--lib/inets/doc/src/httpd.xml13
-rw-r--r--lib/inets/doc/src/notes.xml35
-rw-r--r--lib/inets/examples/httpd_load_test/hdlt_slave.erl2
-rw-r--r--lib/inets/src/http_client/httpc_cookie.erl20
-rw-r--r--lib/inets/src/http_lib/http_internal.hrl3
-rw-r--r--lib/inets/src/http_lib/http_request.erl26
-rw-r--r--lib/inets/src/http_lib/http_uri.erl35
-rw-r--r--lib/inets/src/http_server/httpd_conf.erl24
-rw-r--r--lib/inets/src/http_server/httpd_request.erl102
-rw-r--r--lib/inets/src/http_server/httpd_request_handler.erl32
-rw-r--r--lib/inets/src/http_server/mod_alias.erl14
-rw-r--r--lib/inets/test/http_format_SUITE.erl16
-rw-r--r--lib/inets/test/httpc_SUITE.erl52
-rw-r--r--lib/inets/test/httpd_SUITE.erl45
-rw-r--r--lib/inets/test/uri_SUITE.erl37
-rw-r--r--lib/inets/vsn.mk2
-rw-r--r--lib/kernel/doc/src/gen_sctp.xml2
-rw-r--r--lib/kernel/doc/src/inet.xml10
-rw-r--r--lib/kernel/doc/src/kernel_app.xml14
-rw-r--r--lib/kernel/src/application_controller.erl14
-rw-r--r--lib/kernel/src/file_io_server.erl114
-rw-r--r--lib/kernel/src/gen_udp.erl2
-rw-r--r--lib/kernel/src/hipe_unified_loader.erl17
-rw-r--r--lib/kernel/src/inet.erl4
-rw-r--r--lib/kernel/src/inet_tcp_dist.erl48
-rw-r--r--lib/kernel/src/standard_error.erl155
-rw-r--r--lib/kernel/test/Makefile3
-rw-r--r--lib/kernel/test/erl_distribution_SUITE.erl76
-rw-r--r--lib/kernel/test/file_SUITE.erl152
-rw-r--r--lib/kernel/test/inet_SUITE.erl19
-rw-r--r--lib/kernel/test/standard_error_SUITE.erl38
-rw-r--r--lib/kernel/test/zlib_SUITE.erl35
-rw-r--r--lib/mnesia/doc/src/Mnesia_chap5.xmlsrc1
-rw-r--r--lib/mnesia/doc/src/mnesia.xml16
-rw-r--r--lib/mnesia/src/mnesia_controller.erl29
-rw-r--r--lib/mnesia/src/mnesia_dumper.erl60
-rw-r--r--lib/mnesia/src/mnesia_loader.erl7
-rw-r--r--lib/mnesia/src/mnesia_locker.erl10
-rw-r--r--lib/mnesia/src/mnesia_monitor.erl4
-rw-r--r--lib/mnesia/src/mnesia_recover.erl31
-rw-r--r--lib/mnesia/test/mnesia_recovery_test.erl13
-rw-r--r--lib/mnesia/test/mnesia_test_lib.hrl10
-rw-r--r--lib/observer/doc/src/observer_ug.xml23
-rw-r--r--lib/observer/src/cdv_proc_cb.erl1
-rw-r--r--lib/observer/src/crashdump_viewer.erl48
-rw-r--r--lib/observer/src/crashdump_viewer.hrl4
-rw-r--r--lib/observer/src/observer_html_lib.erl8
-rw-r--r--lib/observer/src/observer_pro_wx.erl2
-rw-r--r--lib/observer/src/observer_procinfo.erl76
-rw-r--r--lib/observer/src/observer_wx.erl125
-rw-r--r--lib/observer/test/observer_SUITE.erl35
-rw-r--r--lib/os_mon/c_src/memsup.c2
-rw-r--r--lib/parsetools/include/leexinc.hrl8
-rw-r--r--lib/parsetools/src/yecc.erl18
-rw-r--r--lib/parsetools/test/yecc_SUITE.erl18
-rw-r--r--lib/public_key/doc/src/public_key.xml89
-rw-r--r--lib/public_key/src/pubkey_cert.erl21
-rw-r--r--lib/public_key/src/pubkey_crl.erl8
-rw-r--r--lib/public_key/src/public_key.erl103
-rw-r--r--lib/public_key/test/public_key_SUITE.erl40
-rw-r--r--lib/public_key/test/public_key_SUITE_data/crl_signer.pem25
-rw-r--r--lib/public_key/test/public_key_SUITE_data/idp_cert.pem30
-rw-r--r--lib/public_key/test/public_key_SUITE_data/idp_crl.pem18
-rw-r--r--lib/runtime_tools/src/dbg.erl60
-rw-r--r--lib/runtime_tools/test/dbg_SUITE.erl36
-rw-r--r--lib/sasl/test/release_handler_SUITE.erl18
-rw-r--r--lib/sasl/test/release_handler_SUITE_data/regexp_appup/app1/ebin/app1.app29
-rw-r--r--lib/sasl/test/release_handler_SUITE_data/regexp_appup/app1/ebin/app1.appup23
-rw-r--r--lib/ssh/doc/src/ssh.xml4
-rw-r--r--lib/ssh/doc/src/using_ssh.xml2
-rw-r--r--lib/ssh/src/ssh_connection_handler.erl2
-rw-r--r--lib/ssh/src/ssh_info.erl11
-rw-r--r--lib/ssh/src/ssh_sftpd.erl122
-rw-r--r--lib/ssh/test/ssh_basic_SUITE.erl39
-rw-r--r--lib/ssh/test/ssh_sftpd_SUITE.erl48
-rw-r--r--lib/ssl/doc/src/Makefile4
-rw-r--r--lib/ssl/doc/src/refman.xml19
-rw-r--r--lib/ssl/doc/src/ssl.xml132
-rw-r--r--lib/ssl/doc/src/ssl_app.xml13
-rw-r--r--lib/ssl/doc/src/ssl_crl_cache.xml66
-rw-r--r--lib/ssl/doc/src/ssl_crl_cache_api.xml97
-rw-r--r--lib/ssl/doc/src/ssl_session_cache_api.xml6
-rw-r--r--lib/ssl/src/Makefile9
-rw-r--r--lib/ssl/src/dtls_record.erl4
-rw-r--r--lib/ssl/src/ssl.app.src4
-rw-r--r--lib/ssl/src/ssl.appup.src8
-rw-r--r--lib/ssl/src/ssl.erl37
-rw-r--r--lib/ssl/src/ssl_alert.erl4
-rw-r--r--lib/ssl/src/ssl_alert.hrl4
-rw-r--r--lib/ssl/src/ssl_certificate.erl56
-rw-r--r--lib/ssl/src/ssl_cipher.erl77
-rw-r--r--lib/ssl/src/ssl_cipher.hrl6
-rw-r--r--lib/ssl/src/ssl_config.erl37
-rw-r--r--lib/ssl/src/ssl_connection.erl11
-rw-r--r--lib/ssl/src/ssl_connection.hrl4
-rw-r--r--lib/ssl/src/ssl_crl.erl82
-rw-r--r--lib/ssl/src/ssl_crl_cache.erl179
-rw-r--r--lib/ssl/src/ssl_crl_cache_api.erl30
-rw-r--r--lib/ssl/src/ssl_handshake.erl177
-rw-r--r--lib/ssl/src/ssl_internal.hrl17
-rw-r--r--lib/ssl/src/ssl_manager.erl231
-rw-r--r--lib/ssl/src/ssl_pkix_db.erl101
-rw-r--r--lib/ssl/src/ssl_record.erl10
-rw-r--r--lib/ssl/src/ssl_v3.erl5
-rw-r--r--lib/ssl/src/tls_connection.erl7
-rw-r--r--lib/ssl/src/tls_handshake.erl82
-rw-r--r--lib/ssl/src/tls_record.erl34
-rw-r--r--lib/ssl/src/tls_v1.erl10
-rw-r--r--lib/ssl/test/Makefile4
-rw-r--r--lib/ssl/test/make_certs.erl89
-rw-r--r--lib/ssl/test/ssl_basic_SUITE.erl120
-rw-r--r--lib/ssl/test/ssl_certificate_verify_SUITE.erl4
-rw-r--r--lib/ssl/test/ssl_cipher_SUITE.erl189
-rw-r--r--lib/ssl/test/ssl_crl_SUITE.erl598
-rw-r--r--lib/ssl/test/ssl_pem_cache_SUITE.erl127
-rw-r--r--lib/ssl/test/ssl_session_cache_SUITE.erl2
-rw-r--r--lib/ssl/test/ssl_test_lib.erl16
-rw-r--r--lib/ssl/test/ssl_upgrade_SUITE.erl164
-rw-r--r--lib/ssl/vsn.mk2
-rw-r--r--lib/stdlib/doc/src/io.xml3
-rw-r--r--lib/stdlib/doc/src/io_lib.xml70
-rw-r--r--lib/stdlib/doc/src/math.xml1
-rw-r--r--lib/stdlib/doc/src/re.xml21
-rw-r--r--lib/stdlib/src/c.erl17
-rw-r--r--lib/stdlib/src/dict.erl6
-rw-r--r--lib/stdlib/src/erl_lint.erl17
-rw-r--r--lib/stdlib/src/ets.erl7
-rw-r--r--lib/stdlib/src/io_lib.erl42
-rw-r--r--lib/stdlib/src/io_lib_format.erl112
-rw-r--r--lib/stdlib/src/math.erl7
-rw-r--r--lib/stdlib/src/otp_internal.erl13
-rw-r--r--lib/stdlib/src/string.erl46
-rw-r--r--lib/stdlib/test/erl_lint_SUITE.erl8
-rw-r--r--lib/stdlib/test/io_SUITE.erl22
-rw-r--r--lib/stdlib/test/stdlib_SUITE.erl15
-rw-r--r--lib/stdlib/test/string_SUITE.erl40
-rw-r--r--lib/syntax_tools/src/epp_dodger.erl6
-rw-r--r--lib/syntax_tools/src/erl_prettypr.erl33
-rw-r--r--lib/syntax_tools/src/erl_syntax.erl208
-rw-r--r--lib/syntax_tools/src/erl_syntax_lib.erl106
-rw-r--r--lib/syntax_tools/src/erl_tidy.erl30
-rw-r--r--lib/syntax_tools/src/igor.erl23
-rw-r--r--lib/test_server/src/erl2html2.erl20
-rw-r--r--lib/test_server/src/test_server.erl232
-rw-r--r--lib/test_server/src/test_server_ctrl.erl9
-rw-r--r--lib/test_server/src/ts_make.erl12
-rw-r--r--lib/test_server/test/erl2html2_SUITE.erl2
-rw-r--r--lib/tools/doc/src/cover.xml109
-rw-r--r--lib/tools/src/cover.erl851
-rw-r--r--lib/tools/src/lcnt.erl70
-rw-r--r--lib/tools/test/cover_SUITE.erl296
-rw-r--r--lib/wx/src/wxe_server.erl14
-rw-r--r--lib/wx/test/wx_event_SUITE.erl8
534 files changed, 26283 insertions, 13444 deletions
diff --git a/lib/asn1/src/Makefile b/lib/asn1/src/Makefile
index 6798da0072..40f440423d 100644
--- a/lib/asn1/src/Makefile
+++ b/lib/asn1/src/Makefile
@@ -206,6 +206,7 @@ $(EBIN)/asn1ct_constructed_per.beam: asn1ct_constructed_per.erl asn1_records.hrl
$(EBIN)/asn1ct_func.beam: asn1ct_func.erl
$(EBIN)/asn1ct_gen.beam: asn1ct_gen.erl asn1_records.hrl
$(EBIN)/asn1ct_gen_ber_bin_v2.beam: asn1ct_gen_ber_bin_v2.erl asn1_records.hrl
+$(EBIN)/asn1ct_gen_check.beam: asn1_records.hrl
$(EBIN)/asn1ct_gen_per.beam: asn1ct_gen_per.erl asn1_records.hrl
$(EBIN)/asn1ct_gen_per_rt2ct.beam: asn1ct_gen_per_rt2ct.erl asn1_records.hrl
$(EBIN)/asn1ct_imm.beam: asn1ct_imm.erl
diff --git a/lib/asn1/src/asn1_db.erl b/lib/asn1/src/asn1_db.erl
index 48d9dd16d7..5577969727 100644
--- a/lib/asn1/src/asn1_db.erl
+++ b/lib/asn1/src/asn1_db.erl
@@ -19,7 +19,8 @@
%%
-module(asn1_db).
--export([dbstart/1,dbnew/2,dbload/1,dbload/3,dbsave/2,dbput/3,dbget/2]).
+-export([dbstart/1,dbnew/2,dbload/1,dbload/3,dbsave/2,dbput/2,
+ dbput/3,dbget/2]).
-export([dbstop/0]).
-record(state, {parent, monitor, includes, table}).
@@ -44,6 +45,7 @@ dbload(Module) ->
dbnew(Module, Erule) -> req({new, Module, Erule}).
dbsave(OutFile, Module) -> cast({save, OutFile, Module}).
dbput(Module, K, V) -> cast({set, Module, K, V}).
+dbput(Module, Kvs) -> cast({set, Module, Kvs}).
dbget(Module, K) -> req({get, Module, K}).
dbstop() -> Resp = req(stop), erase(?MODULE), Resp.
@@ -82,6 +84,10 @@ loop(#state{parent = Parent, monitor = MRef, table = Table,
[{_, Modtab}] = ets:lookup(Table, Mod),
ets:insert(Modtab, {K2, V}),
loop(State);
+ {set, Mod, Kvs} ->
+ [{_, Modtab}] = ets:lookup(Table, Mod),
+ ets:insert(Modtab, Kvs),
+ loop(State);
{From, {get, Mod, K2}} ->
%% XXX If there is no information for Mod, get_table/3
%% will attempt to load information from an .asn1db
diff --git a/lib/asn1/src/asn1_records.hrl b/lib/asn1/src/asn1_records.hrl
index 6c1cf1b12a..84435b2d21 100644
--- a/lib/asn1/src/asn1_records.hrl
+++ b/lib/asn1/src/asn1_records.hrl
@@ -81,9 +81,19 @@
module :: atom(),
val :: atom()}).
--record(state,{module,mname,type,tname,value,vname,erule,parameters=[],
- inputmodules,abscomppath=[],recordtopname=[],options,
- sourcedir}).
+-record(state,
+ {module,
+ mname,
+ tname,
+ erule,
+ parameters=[],
+ inputmodules=[],
+ abscomppath=[],
+ recordtopname=[],
+ options,
+ sourcedir,
+ error_context %Top-level thingie (contains line numbers)
+ }).
%% state record used by back-end at partial decode
%% active is set to 'yes' when a partial decode function is generated.
diff --git a/lib/asn1/src/asn1ct.erl b/lib/asn1/src/asn1ct.erl
index df341e5aab..a26d63c97d 100644
--- a/lib/asn1/src/asn1ct.erl
+++ b/lib/asn1/src/asn1ct.erl
@@ -34,7 +34,8 @@
%% Application internal exports
-export([compile_asn/3,compile_asn1/3,compile_py/3,compile/3,
vsn/0,
- get_name_of_def/1,get_pos_of_def/1]).
+ get_name_of_def/1,get_pos_of_def/1,
+ unset_pos_mod/1]).
-export([read_config_data/1,get_gen_state_field/1,
partial_inc_dec_toptype/1,update_gen_state/2,
get_tobe_refed_func/1,reset_gen_state/0,is_function_generated/1,
@@ -166,46 +167,26 @@ set_scan_parse_pass(#st{files=Files}=St) ->
{error,St#st{error=Error}}
end.
-set_scan_parse_pass_1([F|Fs], St) ->
+set_scan_parse_pass_1([F|Fs], #st{file=File}=St) ->
case asn1ct_tok:file(F) of
{error,Error} ->
throw(Error);
Tokens when is_list(Tokens) ->
- case catch asn1ct_parser2:parse(Tokens) of
+ case asn1ct_parser2:parse(File, Tokens) of
{ok,M} ->
[M|set_scan_parse_pass_1(Fs, St)];
- {error,ErrorTerm} ->
- throw(handle_parse_error(ErrorTerm, St))
+ {error,Errors} ->
+ throw(Errors)
end
end;
set_scan_parse_pass_1([], _) -> [].
-parse_pass(#st{code=Tokens}=St) ->
- case catch asn1ct_parser2:parse(Tokens) of
+parse_pass(#st{file=File,code=Tokens}=St) ->
+ case asn1ct_parser2:parse(File, Tokens) of
{ok,M} ->
{ok,St#st{code=M}};
- {error,ErrorTerm} ->
- {error,St#st{error=handle_parse_error(ErrorTerm, St)}}
- end.
-
-handle_parse_error(ErrorTerm, #st{file=File,opts=Opts}) ->
- case ErrorTerm of
- {{Line,_Mod,Message},_TokTup} ->
- if
- is_integer(Line) ->
- BaseName = filename:basename(File),
- error("syntax error at line ~p in module ~s:~n",
- [Line,BaseName], Opts);
- true ->
- error("syntax error in module ~p:~n",
- [File], Opts)
- end,
- print_error_message(Message),
- Message;
- {Line,_Mod,[Message,Token]} ->
- error("syntax error: ~p ~p at line ~p~n",
- [Message,Token,Line], Opts),
- {Line,[Message,Token]}
+ {error,Errors} ->
+ {error,St#st{error=Errors}}
end.
merge_pass(#st{file=Base,code=Code}=St) ->
@@ -559,7 +540,10 @@ unset_pos_mod(Def) when is_record(Def,pvaluesetdef) ->
unset_pos_mod(Def) when is_record(Def,pobjectdef) ->
Def#pobjectdef{pos=undefined};
unset_pos_mod(Def) when is_record(Def,pobjectsetdef) ->
- Def#pobjectsetdef{pos=undefined}.
+ Def#pobjectsetdef{pos=undefined};
+unset_pos_mod(#'ComponentType'{} = Def) ->
+ Def#'ComponentType'{pos=undefined};
+unset_pos_mod(Def) -> Def.
get_pos_of_def(#typedef{pos=Pos}) ->
Pos;
@@ -1406,33 +1390,6 @@ prepare_bytes(Bytes) -> list_to_binary(Bytes).
vsn() ->
?vsn.
-
-
-print_error_message([got,H|T]) when is_list(H) ->
- io:format(" got:"),
- print_listing(H,"and"),
- print_error_message(T);
-print_error_message([expected,H|T]) when is_list(H) ->
- io:format(" expected one of:"),
- print_listing(H,"or"),
- print_error_message(T);
-print_error_message([H|T]) ->
- io:format(" ~p",[H]),
- print_error_message(T);
-print_error_message([]) ->
- io:format("~n").
-
-print_listing([H1,H2|[]],AndOr) ->
- io:format(" ~p ~s ~p",[H1,AndOr,H2]);
-print_listing([H1,H2|T],AndOr) ->
- io:format(" ~p,",[H1]),
- print_listing([H2|T],AndOr);
-print_listing([H],_AndOr) ->
- io:format(" ~p",[H]);
-print_listing([],_) ->
- ok.
-
-
specialized_decode_prepare(Erule,M,TsAndVs,Options) ->
case lists:member(asn1config,Options) of
true ->
diff --git a/lib/asn1/src/asn1ct_check.erl b/lib/asn1/src/asn1ct_check.erl
index 240f1cbb16..99392d6eaa 100644
--- a/lib/asn1/src/asn1ct_check.erl
+++ b/lib/asn1/src/asn1ct_check.erl
@@ -23,8 +23,6 @@
%% Main Module for ASN.1 compile time functions
%-compile(export_all).
-%% Avoid warning for local function error/1 clashing with autoimported BIF.
--compile({no_auto_import,[error/1]}).
-export([check/2,storeindb/2,format_error/1]).
%-define(debug,1).
-include("asn1_records.hrl").
@@ -60,17 +58,9 @@
-define(N_BMPString, 30).
-define(TAG_PRIMITIVE(Num),
- case S#state.erule of
- ber ->
- #tag{class='UNIVERSAL',number=Num,type='IMPLICIT',form=0};
- _ -> []
- end).
+ #tag{class='UNIVERSAL',number=Num,type='IMPLICIT',form=0}).
-define(TAG_CONSTRUCTED(Num),
- case S#state.erule of
- ber ->
- #tag{class='UNIVERSAL',number=Num,type='IMPLICIT',form=32};
- _ -> []
- end).
+ #tag{class='UNIVERSAL',number=Num,type='IMPLICIT',form=32}).
-record(newt,{type=unchanged,tag=unchanged,constraint=unchanged,inlined=no}). % used in check_type to update type and tag
@@ -249,26 +239,18 @@ check_exports(S,Module = #module{}) ->
{exports,all} ->
[];
{exports,ExportList} when is_list(ExportList) ->
- IsNotDefined =
+ IsNotDefined =
fun(X) ->
- case catch get_referenced_type(S,X) of
- {error,{asn1,_}} ->
- true;
- _ -> false
+ try
+ _ = get_referenced_type(S,X),
+ false
+ catch {error,_} ->
+ true
end
end,
- case lists:filter(IsNotDefined,ExportList) of
- [] ->
- [];
- NoDefExp ->
- GetName =
- fun(T = #'Externaltypereference'{type=N})->
- %%{exported,undefined,entity,N}
- NewS=S#state{type=T,tname=N},
- error({export,"exported undefined entity",NewS})
- end,
- lists:map(GetName,NoDefExp)
- end
+ [return_asn1_error(S, Ext, {undefined_export, Undef}) ||
+ Ext = #'Externaltypereference'{type=Undef} <- ExportList,
+ IsNotDefined(Ext)]
end.
check_imports(S, #module{imports={imports,Imports}}) ->
@@ -276,53 +258,18 @@ check_imports(S, #module{imports={imports,Imports}}) ->
check_imports_1(_S, [], Acc) ->
Acc;
-check_imports_1(S, [#'SymbolsFromModule'{symbols=Imports,module=ModuleRef}|SFMs], Acc0) ->
+check_imports_1(S, [#'SymbolsFromModule'{symbols=Imports,module=ModuleRef}|SFMs], Acc) ->
Module = name_of_def(ModuleRef),
- Refs0 = [{catch get_referenced_type(S, Ref),Ref} || Ref <- Imports],
- Refs = [{M,R} || {{M,_},R} <- Refs0],
- {Illegal,Other} = lists:splitwith(fun({error,_}) -> true;
- (_) -> false
- end, Refs),
- ChainedRefs = [R || {M,R} <- Other, M =/= Module],
- IllegalRefs = [R || {error,R} <- Illegal] ++
- [R || {M,R} <- ChainedRefs,
- ok =/= chained_import(S, Module, M, name_of_def(R))],
- Acc = [return_asn1_error(S, Ref, {undefined_import,name_of_def(Ref),Module}) ||
- Ref <- IllegalRefs] ++ Acc0,
- check_imports_1(S, SFMs, Acc).
-
-chained_import(S,ImpMod,DefMod,Name) ->
- %% Name is a referenced structure that is not defined in ImpMod,
- %% but must be present in the Imports list of ImpMod. The chain of
- %% imports of Name must end in DefMod.
- GetImports =
- fun(_M_) ->
- case asn1_db:dbget(_M_,'MODULE') of
- #module{imports={imports,ImportList}} ->
- ImportList;
- _ -> []
- end
- end,
- FindNameInImports =
- fun([],N,_) -> {no_mod,N};
- ([#'SymbolsFromModule'{symbols=Imports,module=ModuleRef}|SFMs],N,F) ->
- case [name_of_def(X) || X <- Imports, name_of_def(X) =:= N] of
- [] -> F(SFMs,N,F);
- [N] -> {name_of_def(ModuleRef),N}
- end
- end,
- case GetImports(ImpMod) of
- [] ->
- error;
- Imps ->
- case FindNameInImports(Imps,Name,FindNameInImports) of
- {no_mod,_} ->
- error;
- {DefMod,_} -> ok;
- {OtherMod,_} ->
- chained_import(S,OtherMod,DefMod,Name)
- end
- end.
+ Refs = [{try get_referenced_type(S, Ref)
+ catch throw:Error -> Error end,
+ Ref}
+ || Ref <- Imports],
+ CreateError = fun(Ref) ->
+ Error = {undefined_import,name_of_def(Ref),Module},
+ return_asn1_error(S, Ref, Error)
+ end,
+ Errors = [CreateError(Ref) || {{error, _}, Ref} <- Refs],
+ check_imports_1(S, SFMs, Errors ++ Acc).
checkt(S0, Names) ->
Check = fun do_checkt/3,
@@ -335,7 +282,7 @@ checkt(S0, Names) ->
check_fold(S0, lists:reverse(CtxtSwitch), Check) ++ Types.
do_checkt(S, Name, #typedef{typespec=TypeSpec}=Type0) ->
- NewS = S#state{type=Type0,tname=Name},
+ NewS = S#state{tname=Name},
try check_type(NewS, Type0, TypeSpec) of
#type{}=Ts ->
case Type0#typedef.checked of
@@ -350,7 +297,7 @@ do_checkt(S, Name, #typedef{typespec=TypeSpec}=Type0) ->
end
catch
{error,Reason} ->
- error({type,Reason,NewS});
+ Reason;
{asn1_class,_ClassDef} ->
{asn1_class,Name};
pobjectsetdef ->
@@ -384,33 +331,32 @@ do_checkv(S, Name, Value)
is_record(Value, typedef); %Value set may be parsed as object set.
is_record(Value, pvaluedef);
is_record(Value, pvaluesetdef) ->
- NewS = S#state{value=Value},
- try check_value(NewS, Value) of
+ try check_value(S, Value) of
{valueset,VSet} ->
Pos = asn1ct:get_pos_of_def(Value),
CheckedVSDef = #typedef{checked=true,pos=Pos,
name=Name,typespec=VSet},
- asn1_db:dbput(NewS#state.mname, Name, CheckedVSDef),
+ asn1_db:dbput(S#state.mname, Name, CheckedVSDef),
{valueset,Name};
V ->
%% update the valuedef
- asn1_db:dbput(NewS#state.mname, Name, V),
+ asn1_db:dbput(S#state.mname, Name, V),
ok
catch
{error,Reason} ->
- error({value,Reason,NewS});
+ Reason;
{pobjectsetdef} ->
{pobjectsetdef,Name};
{objectsetdef} ->
{objectsetdef,Name};
- {objectdef} ->
+ {asn1_class, _} ->
%% this is an object, save as typedef
#valuedef{checked=C,pos=Pos,name=N,type=Type,
value=Def} = Value,
ClassName = Type#type.def,
NewSpec = #'Object'{classname=ClassName,def=Def},
NewDef = #typedef{checked=C,pos=Pos,name=N,typespec=NewSpec},
- asn1_db:dbput(NewS#state.mname, Name, NewDef),
+ asn1_db:dbput(S#state.mname, Name, NewDef),
{objectdef,Name}
end.
@@ -419,7 +365,7 @@ checkp(S, Names) ->
check_fold(S, Names, fun do_checkp/3).
do_checkp(S0, Name, #ptypedef{typespec=TypeSpec}=Type0) ->
- S = S0#state{type=Type0,tname=Name},
+ S = S0#state{tname=Name},
try check_ptype(S, Type0, TypeSpec) of
#type{}=Ts ->
Type = Type0#ptypedef{checked=true,typespec=Ts},
@@ -427,7 +373,7 @@ do_checkp(S0, Name, #ptypedef{typespec=TypeSpec}=Type0) ->
ok
catch
{error,Reason} ->
- error({type,Reason,S});
+ Reason;
{asn1_class,_ClassDef} ->
{asn1_class,Name};
{asn1_param_class,_} ->
@@ -438,100 +384,81 @@ do_checkp(S0, Name, #ptypedef{typespec=TypeSpec}=Type0) ->
checkc(S, Names) ->
check_fold(S, Names, fun do_checkc/3).
-do_checkc(S0, Name, Class0) ->
- {Class1,ClassSpec} =
- case Class0 of
- #classdef{} ->
- {Class0,Class0};
- #typedef{} ->
- {#classdef{name=Name},Class0#typedef.typespec}
- end,
- S = S0#state{type=Class0,tname=Name},
- try check_class(S, ClassSpec) of
- C ->
- Class = Class1#classdef{checked=true,typespec=C},
- asn1_db:dbput(S#state.mname, Name, Class),
- ok
- catch
- {error,Reason} ->
- error({class,Reason,S})
- end.
+do_checkc(S, Name, Class) ->
+ try
+ case is_classname(Name) of
+ false ->
+ asn1_error(S, {illegal_class_name,Name});
+ true ->
+ do_checkc_1(S, Name, Class)
+ end
+ catch {error,Reason} -> Reason
+ end.
+
+do_checkc_1(S, Name, #classdef{}=Class) ->
+ C = check_class(S, Class),
+ store_class(S, true, Class#classdef{typespec=C}, Name),
+ ok;
+do_checkc_1(S, Name, #typedef{typespec=#type{def=Def}=TS}) ->
+ C = check_class(S, TS),
+ {Mod,Pos} = case Def of
+ #'Externaltypereference'{module=M, pos=P} ->
+ {M,P};
+ {pt, #'Externaltypereference'{module=M, pos=P}, _} ->
+ {M,P}
+ end,
+ Class = #classdef{name=Name, typespec=C, pos=Pos, module=Mod},
+ store_class(S, true, Class, Name),
+ ok.
+
+%% is_classname(Atom) -> true|false.
+is_classname(Name) when is_atom(Name) ->
+ lists:all(fun($-) -> true;
+ (D) when $0 =< D, D =< $9 -> true;
+ (UC) when $A =< UC, UC =< $Z -> true;
+ (_) -> false
+ end, atom_to_list(Name)).
-checko(S,[Name|Os],Acc,ExclO,ExclOS) ->
- ?dbg("Checking object ~p~n",[Name]),
- Result =
- case asn1_db:dbget(S#state.mname,Name) of
- undefined ->
- error({type,{internal_error,'???'},S});
- Object when is_record(Object,typedef) ->
- NewS = S#state{type=Object,tname=Name},
- case catch(check_object(NewS,Object,Object#typedef.typespec)) of
- {error,Reason} ->
- error({type,Reason,NewS});
- {'EXIT',Reason} ->
- error({type,{internal_error,Reason},NewS});
- {asn1,Reason} ->
- error({type,Reason,NewS});
- O ->
- NewObj = Object#typedef{checked=true,typespec=O},
- asn1_db:dbput(NewS#state.mname,Name,NewObj),
- if
- is_record(O,'Object') ->
- case O#'Object'.gen of
- true ->
- {ok,ExclO,ExclOS};
- false ->
- {ok,[Name|ExclO],ExclOS}
- end;
- is_record(O,'ObjectSet') ->
- case O#'ObjectSet'.gen of
- true ->
- {ok,ExclO,ExclOS};
- false ->
- {ok,ExclO,[Name|ExclOS]}
- end
- end
- end;
- PObject when is_record(PObject,pobjectdef) ->
- NewS = S#state{type=PObject,tname=Name},
- case (catch check_pobject(NewS,PObject)) of
- {error,Reason} ->
- error({type,Reason,NewS});
- {'EXIT',Reason} ->
- error({type,{internal_error,Reason},NewS});
- {asn1,Reason} ->
- error({type,Reason,NewS});
- PO ->
- NewPObj = PObject#pobjectdef{def=PO},
- asn1_db:dbput(NewS#state.mname,Name,NewPObj),
- {ok,[Name|ExclO],ExclOS}
- end;
- PObjSet when is_record(PObjSet,pvaluesetdef) ->
- %% this is a parameterized object set. Might be a parameterized
- %% value set, couldn't it?
- NewS = S#state{type=PObjSet,tname=Name},
- case (catch check_pobjectset(NewS,PObjSet)) of
- {error,Reason} ->
- error({type,Reason,NewS});
- {'EXIT',Reason} ->
- error({type,{internal_error,Reason},NewS});
- {asn1,Reason} ->
- error({type,Reason,NewS});
- POS ->
- %%NewPObjSet = PObjSet#pvaluesetdef{valueset=POS},
- asn1_db:dbput(NewS#state.mname,Name,POS),
- {ok,ExclO,[Name|ExclOS]}
- end
- end,
- case Result of
- {ok,NewExclO,NewExclOS} ->
- checko(S,Os,Acc,NewExclO,NewExclOS);
- _ ->
- checko(S,Os,[Result|Acc],ExclO,ExclOS)
+checko(S0,[Name|Os],Acc,ExclO,ExclOS) ->
+ Item = asn1_db:dbget(S0#state.mname, Name),
+ S = S0#state{error_context=Item},
+ try checko_1(S, Item, Name, ExclO, ExclOS) of
+ {NewExclO,NewExclOS} ->
+ checko(S, Os, Acc, NewExclO, NewExclOS)
+ catch
+ throw:{error, Error} ->
+ checko(S, Os, [Error|Acc], ExclO, ExclOS)
end;
checko(_S,[],Acc,ExclO,ExclOS) ->
{lists:reverse(Acc),lists:reverse(ExclO),lists:reverse(ExclOS)}.
+checko_1(S, #typedef{typespec=TS}=Object, Name, ExclO, ExclOS) ->
+ NewS = S#state{tname=Name},
+ O = check_object(NewS, Object, TS),
+ NewObj = Object#typedef{checked=true,typespec=O},
+ asn1_db:dbput(NewS#state.mname, Name, NewObj),
+ case O of
+ #'Object'{gen=true} ->
+ {ExclO,ExclOS};
+ #'Object'{gen=false} ->
+ {[Name|ExclO],ExclOS};
+ #'ObjectSet'{gen=true} ->
+ {ExclO,ExclOS};
+ #'ObjectSet'{gen=false} ->
+ {ExclO,[Name|ExclOS]}
+ end;
+checko_1(S, #pobjectdef{}=PObject, Name, ExclO, ExclOS) ->
+ NewS = S#state{tname=Name},
+ PO = check_pobject(NewS, PObject),
+ NewPObj = PObject#pobjectdef{def=PO},
+ asn1_db:dbput(NewS#state.mname, Name, NewPObj),
+ {[Name|ExclO],ExclOS};
+checko_1(S, #pvaluesetdef{}=PObjSet, Name, ExclO, ExclOS) ->
+ NewS = S#state{tname=Name},
+ POS = check_pobjectset(NewS, PObjSet),
+ asn1_db:dbput(NewS#state.mname, Name, POS),
+ {ExclO,[Name|ExclOS]}.
+
check_class(S,CDef=#classdef{checked=Ch,name=Name,typespec=TS}) ->
case Ch of
true -> TS;
@@ -551,22 +478,16 @@ check_class(S = #state{mname=M,tname=T},ClassSpec)
Tref = #'Externaltypereference'{type=TName} ->
{MName,RefType} = get_referenced_type(S,Tref),
#classdef{} = CD = get_class_def(S, RefType),
- NewState = update_state(S#state{type=RefType,
- tname=TName}, MName),
+ NewState = update_state(S#state{tname=TName}, MName),
check_class(NewState, CD);
{pt,ClassRef,Params} ->
%% parameterized class
{_,PClassDef} = get_referenced_type(S,ClassRef),
- NewParaList =
- [match_parameters(S,TmpParam,S#state.parameters)||
- TmpParam <- Params],
+ NewParaList = match_parameters(S, Params),
instantiate_pclass(S,PClassDef,NewParaList)
end;
-check_class(S,C) when is_record(C,objectclass) ->
- NewFieldSpec = check_class_fields(S,C#objectclass.fields),
- C#objectclass{fields=NewFieldSpec};
-check_class(_S,{poc,_ObjSet,_Params}) ->
- 'fix this later';
+check_class(S, #objectclass{}=C) ->
+ check_objectclass(S, C);
check_class(S,ClassName) ->
{RefMod,Def} = get_referenced_type(S,ClassName),
case Def of
@@ -579,8 +500,7 @@ check_class(S,ClassName) ->
false ->
Name=ClassName#'Externaltypereference'.type,
store_class(S,idle,ClassDef,Name),
-% NewS = S#state{mname=RefMod,type=Def,tname=Name},
- NewS = update_state(S#state{type=Def,tname=Name},RefMod),
+ NewS = update_state(S#state{tname=Name}, RefMod),
CheckedTS = check_class(NewS,ClassDef#classdef.typespec),
store_class(S,true,ClassDef#classdef{typespec=CheckedTS},Name),
CheckedTS
@@ -594,11 +514,20 @@ check_class(S,ClassName) ->
end
end.
+check_objectclass(S, #objectclass{fields=Fs0,syntax=Syntax0}=C) ->
+ Fs = check_class_fields(S, Fs0),
+ case Syntax0 of
+ {'WITH SYNTAX',Syntax1} ->
+ Syntax = preprocess_syntax(S, Syntax1, Fs),
+ C#objectclass{fields=Fs,syntax={preprocessed_syntax,Syntax}};
+ _ ->
+ C#objectclass{fields=Fs}
+ end.
+
instantiate_pclass(S=#state{parameters=_OldArgs},PClassDef,Params) ->
#ptypedef{args=Args,typespec=Type} = PClassDef,
MatchedArgs = match_args(S,Args, Params, []),
-% NewS = S#state{type=Type,parameters=MatchedArgs++OldArgs,abscomppath=[]},
- NewS = S#state{type=Type,parameters=MatchedArgs,abscomppath=[]},
+ NewS = S#state{parameters=MatchedArgs,abscomppath=[]},
check_class(NewS,#classdef{name=S#state.tname,typespec=Type}).
store_class(S,Mode,ClassDef,ClassName) ->
@@ -613,6 +542,12 @@ check_class_fields(S,[F|Fields],Acc) ->
case element(1,F) of
fixedtypevaluefield ->
{_,Name,Type,Unique,OSpec} = F,
+ case {Unique,OSpec} of
+ {'UNIQUE',{'DEFAULT',_}} ->
+ asn1_error(S, {unique_and_default,Name});
+ {_,_} ->
+ ok
+ end,
RefType = check_type(S,#typedef{typespec=Type},Type),
{fixedtypevaluefield,Name,RefType,Unique,OSpec};
object_or_fixedtypevalue_field ->
@@ -621,7 +556,7 @@ check_class_fields(S,[F|Fields],Acc) ->
Cat =
case asn1ct_gen:type(asn1ct_gen:get_inner(Type2#type.def)) of
Def when is_record(Def,'Externaltypereference') ->
- {_,D} = get_referenced_type(S,Def),
+ {_,D} = get_referenced_type(S, Def, true),
D;
{undefined,user} ->
%% neither of {primitive,bif} or {constructed,bif}
@@ -644,18 +579,14 @@ check_class_fields(S,[F|Fields],Acc) ->
objectset_or_fixedtypevalueset_field ->
{_,Name,Type,OSpec} = F,
RefType =
- case (catch check_type(S,#typedef{typespec=Type},Type)) of
- {asn1_class,_ClassDef} ->
+ try check_type(S,#typedef{typespec=Type},Type) of
+ #type{} = CheckedType ->
+ CheckedType
+ catch {asn1_class,_ClassDef} ->
case if_current_checked_type(S,Type) of
- true ->
- Type#type.def;
- _ ->
- check_class(S,Type)
- end;
- CheckedType when is_record(CheckedType,type) ->
- CheckedType;
- _ ->
- error({class,"internal error, check_class_fields",S})
+ true -> Type#type.def;
+ _ -> check_class(S,Type)
+ end
end,
if
is_record(RefType,'Externaltypereference') ->
@@ -733,38 +664,34 @@ check_pobjectset(S,PObjSet) ->
PObjSet
end.
+-record(osi, %Object set information.
+ {st, %Compiler state (#state{}).
+ classref, %Checked class reference of the object set.
+ uniq, %UNIQUE field info: {unique,undefined} | {FieldName,_}.
+ ext %Boolean; true when an extension marker has been seen.
+ }).
+
check_object(_S,ObjDef,ObjSpec) when (ObjDef#typedef.checked == true) ->
ObjSpec;
check_object(S,_ObjDef,#'Object'{classname=ClassRef,def=ObjectDef}) ->
?dbg("check_object ~p~n",[ObjectDef]),
-%% io:format("check_object,object: ~p~n",[ObjectDef]),
-% {MName,_ClassDef} = get_referenced_type(S,ClassRef),
- NewClassRef = check_externaltypereference(S,ClassRef),
- ClassDef =
- case get_referenced_type(S,ClassRef) of
- {MName,ClDef=#classdef{checked=false}} ->
- NewState = update_state(S#state{type=ClDef,
- tname=ClassRef#'Externaltypereference'.type},MName),
- ObjClass=
- check_class(NewState,ClDef),
- #classdef{checked=true,
- typespec=ObjClass};
- {_,_ClDef} when is_record(_ClDef,classdef) ->
- _ClDef;
- {MName,_TDef=#typedef{checked=false,pos=Pos,
- name=_TName,typespec=TS}} ->
- ClDef = #classdef{pos=Pos,name=_TName,typespec=TS},
- NewState = update_state(S#state{type=_TDef,
- tname=ClassRef#'Externaltypereference'.type},MName),
- ObjClass =
- check_class(NewState,ClDef),
- ClDef#classdef{checked=true,typespec=ObjClass};
- {_,_ClDef} ->
- _ClDef
+ _ = check_externaltypereference(S,ClassRef),
+ {ClassDef, NewClassRef} =
+ case get_referenced_type(S, ClassRef, true) of
+ {MName,#classdef{checked=false, name=CLName}=ClDef} ->
+ Type = ClassRef#'Externaltypereference'.type,
+ NewState = update_state(S#state{tname=Type}, MName),
+ ObjClass = check_class(NewState, ClDef),
+ {ClDef#classdef{checked=true, typespec=ObjClass},
+ #'Externaltypereference'{module=MName, type=CLName}};
+ {MName,#classdef{name=CLName}=ClDef} ->
+ {ClDef, #'Externaltypereference'{module=MName, type=CLName}};
+ _ ->
+ asn1_error(S, illegal_object)
end,
NewObj =
case ObjectDef of
- Def when is_tuple(Def), (element(1,Def)==object) ->
+ {object,_,_}=Def ->
NewSettingList = check_objectdefn(S,Def,ClassDef),
#'Object'{def=NewSettingList};
{po,{object,DefObj},ArgsList} ->
@@ -778,425 +705,287 @@ check_object(S,_ObjDef,#'Object'{classname=ClassRef,def=ObjectDef}) ->
instantiate_po(S,ClassDef,Object,ArgList);
#'Externalvaluereference'{} ->
{_,Object} = get_referenced_type(S,ObjectDef),
- check_object(S,Object,Object#typedef.typespec);
+ check_object(S, Object, object_to_check(S, Object));
[] ->
- %% An object with no fields. All class fields must be
- %% optional or default. Check that all fields in
- %% class are 'OPTIONAL' or 'DEFAULT'
- class_fields_optional_check(S,ClassDef),
- #'Object'{def={object,defaultsyntax,[]}};
- _ ->
- exit({error,{no_object,ObjectDef},S})
+ %% An object with no fields (parsed as a value).
+ Def = {object,defaultsyntax,[]},
+ NewSettingList = check_objectdefn(S, Def, ClassDef),
+ #'Object'{def=NewSettingList};
+ _ ->
+ asn1_error(S, illegal_object)
end,
- Gen = gen_incl(S,NewObj#'Object'.def,
- (ClassDef#classdef.typespec)#objectclass.fields),
+ Fields = (ClassDef#classdef.typespec)#objectclass.fields,
+ Gen = gen_incl(S,NewObj#'Object'.def, Fields),
NewObj#'Object'{classname=NewClassRef,gen=Gen};
-
-
-check_object(S,
- _ObjSetDef,
- ObjSet=#'ObjectSet'{class=ClassRef}) ->
-%% io:format("check_object,SET: ~p~n",[ObjSet#'ObjectSet'.set]),
- ?dbg("check_object set: ~p~n",[ObjSet#'ObjectSet'.set]),
- {_,ClassDef} = get_referenced_type(S,ClassRef),
- NewClassRef = check_externaltypereference(S,ClassRef),
- {UniqueFieldName,UniqueInfo} =
- case (catch get_unique_fieldname(S,ClassDef)) of
- {error,'__undefined_',_} ->
- {{unique,undefined},{unique,undefined}};
- {asn1,Msg,_} -> error({class,Msg,S});
- {'EXIT',Msg} -> error({class,{internal_error,Msg},S});
+check_object(S, _, #'ObjectSet'{class=ClassRef0,set=Set0}=ObjSet0) ->
+ {_,ClassDef} = get_referenced_type(S, ClassRef0),
+ ClassRef = check_externaltypereference(S, ClassRef0),
+ {UniqueFieldName,UniqueInfo} =
+ case get_unique_fieldname(S, ClassDef) of
+ no_unique -> {{unique,undefined},{unique,undefined}};
Other -> {element(1,Other),Other}
end,
- NewObjSet=
- case prepare_objset(ObjSet#'ObjectSet'.set) of
- {set,SET,EXT} ->
- CheckedSet = check_object_list(S,NewClassRef,SET),
- NewSet = get_unique_valuelist(S,CheckedSet,UniqueInfo),
- ObjSet#'ObjectSet'{uniquefname=UniqueFieldName,
- set=extensionmark(NewSet,EXT)};
-
- {'SingleValue',ERef = #'Externalvaluereference'{}} ->
- {RefedMod,ObjDef} = get_referenced_type(S,ERef),
- #'Object'{def=CheckedObj} =
- check_object(S,ObjDef,ObjDef#typedef.typespec),
-
- NewSet = get_unique_valuelist(S,[{{RefedMod,get_datastr_name(ObjDef)},
- CheckedObj}],
- UniqueInfo),
- ObjSet#'ObjectSet'{uniquefname=UniqueFieldName,
- set=NewSet};
- ['EXTENSIONMARK'] ->
- ObjSet#'ObjectSet'{uniquefname=UniqueFieldName,
- set=['EXTENSIONMARK']};
-
- OSref when is_record(OSref,'Externaltypereference') ->
- {_,OS=#typedef{typespec=OSdef}} = get_referenced_type(S,OSref),
- check_object(S,OS,OSdef);
-
- {Type,{'EXCEPT',Exclusion}} when is_record(Type,type) ->
- {_,TDef} = get_referenced_type(S,Type#type.def),
- OS = TDef#typedef.typespec,
- NewSet = reduce_objectset(OS#'ObjectSet'.set,Exclusion),
- NewOS = OS#'ObjectSet'{set=NewSet},
- check_object(S,TDef#typedef{typespec=NewOS},
- NewOS);
- #type{def={pt,DefinedObjSet,ParamList}} ->
- {_,PObjSetDef} = get_referenced_type(S,DefinedObjSet),
- NewParamList =
- [match_parameters(S,TmpParam,S#state.parameters)||
- TmpParam <- ParamList],
- instantiate_pos(S,ClassRef,PObjSetDef,NewParamList);
-
- %% actually this is an ObjectSetFromObjects construct, it
- %% is when the object set is retrieved from an object
- %% field.
- #type{def=#'ObjectClassFieldType'{classname=ObjName,
- fieldname=FieldName}} ->
- {RefedObjMod,TDef} = get_referenced_type(S,ObjName),
- OS=TDef#typedef.typespec,
- %% should get the right object set here. Get the field
- %% FieldName out of the object set OS of class
- %% OS#'ObjectSet'.class
- OS2=check_object(S,TDef,OS),
- NewSet=object_set_from_objects(S,RefedObjMod,FieldName,OS2),
- ObjSet#'ObjectSet'{uniquefname=UniqueFieldName,
- set=NewSet};
- {'ObjectSetFromObjects',{_,_,ObjName},FieldName} ->
- {RefedObjMod,TDef} = get_referenced_type(S,ObjName),
- OS=TDef#typedef.typespec,
- %% should get the right object set here. Get the field
- %% FieldName out of the object set OS of class
- %% OS#'ObjectSet'.class
- OS2=check_object(S,TDef,OS),
- NewSet=object_set_from_objects(S,RefedObjMod,FieldName,OS2),
- ObjSet#'ObjectSet'{uniquefname=UniqueFieldName,
- set=NewSet};
- {'ObjectSetFromObjects',{_,ObjName},FieldName} ->
- %% This is a ObjectSetFromObjects, i.e.
- %% ObjectSetFromObjects ::= ReferencedObjects "." FieldName
- %% with a defined object as ReferencedObjects. And
- %% the FieldName of the Class (object) contains an object set.
- {RefedObjMod,TDef} = get_referenced_type(S,ObjName),
- O1 = TDef#typedef.typespec,
- O2 = check_object(S,TDef,O1),
- NewSet = object_set_from_objects(S,RefedObjMod,FieldName,O2),
- OS2=ObjSet#'ObjectSet'{uniquefname=UniqueFieldName,
- set=NewSet},
- %%io:format("ObjectSet: ~p~n",[OS2]),
- OS2;
- {pos,{objectset,_,DefinedObjSet},Params} ->
- {_,PObjSetDef} = get_referenced_type(S,DefinedObjSet),
- NewParamList =
- [match_parameters(S,TmpParam,S#state.parameters)||
- TmpParam <- Params],
- instantiate_pos(S,ClassRef,PObjSetDef,NewParamList);
- Unknown ->
- exit({error,{unknown_object_set,Unknown},S})
- end,
- NewSet2 = remove_duplicate_objects(NewObjSet#'ObjectSet'.set),
- NewObjSet2 = NewObjSet#'ObjectSet'{set=NewSet2},
- Gen = gen_incl_set(S,NewObjSet2#'ObjectSet'.set,
- ClassDef),
- ?dbg("check_object done~n",[]),
- NewObjSet2#'ObjectSet'{class=NewClassRef,gen=Gen}.
+ OSI0 = #osi{st=S,classref=ClassRef,uniq=UniqueInfo,ext=false},
+ {Set1,OSI1} = if
+ is_list(Set0) ->
+ check_object_set_list(Set0, OSI0);
+ true ->
+ check_object_set(Set0, OSI0)
+ end,
+ Ext = case Set1 of
+ [] ->
+ %% FIXME: X420 does not compile unless we force
+ %% empty sets to be extensible. There should be
+ %% a better way.
+ true;
+ [_|_] ->
+ OSI1#osi.ext
+ end,
+ Set2 = remove_duplicate_objects(S, Set1),
+ Set = case Ext of
+ false -> Set2;
+ true -> Set2 ++ ['EXTENSIONMARK']
+ end,
+ ObjSet = ObjSet0#'ObjectSet'{uniquefname=UniqueFieldName,set=Set},
+ Gen = gen_incl_set(S, Set, ClassDef),
+ ObjSet#'ObjectSet'{class=ClassRef,gen=Gen}.
+
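+%% check_object_set(Element, #osi{}) -> {Objects,#osi{}}
+%%   Check one element of an object set definition. Returns the
+%%   objects as a list of {{Module,Name},UniqueValue,Fields} tuples
+%%   along with an updated #osi{} record (the ext flag is set if an
+%%   extension marker was seen).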
+check_object_set({element_set,Root0,Ext0}, OSI0) ->
+ OSI = case Ext0 of
+ none -> OSI0;
+ _ -> OSI0#osi{ext=true}
+ end,
+ case {Root0,Ext0} of
+ {empty,empty} -> {[],OSI};
+ {empty,Ext} -> check_object_set(Ext, OSI);
+ {Root,none} -> check_object_set(Root, OSI);
+ {Root,empty} -> check_object_set(Root, OSI);
+ {Root,Ext} -> check_object_set_list([Root,Ext], OSI)
+ end;
+check_object_set(#'Externaltypereference'{}=Ref, #osi{st=S}=OSI) ->
+ {_,#typedef{typespec=OSdef}=OS} = get_referenced_type(S, Ref),
+ ObjectSet = check_object(S, OS, OSdef),
+ check_object_set_objset(ObjectSet, OSI);
+check_object_set(#'Externalvaluereference'{}=Ref, #osi{st=S}=OSI) ->
+ {RefedMod,ObjName,#'Object'{def=Def}} = check_referenced_object(S, Ref),
+ ObjList = check_object_set_mk(RefedMod, ObjName, Def, OSI),
+ {ObjList,OSI};
+check_object_set({'EXCEPT',Incl0,Excl0}, OSI) ->
+ {Incl1,_} = check_object_set(Incl0, OSI),
+ {Excl1,_} = check_object_set(Excl0, OSI),
+ Exclude = sofs:set([N || {N,_,_} <- Excl1], [name]),
+ Incl2 = [{Name,Obj} || {Name,_,_}=Obj <- Incl1],
+ Incl3 = sofs:relation(Incl2, [{name,object}]),
+ Incl4 = sofs:drestriction(Incl3, Exclude),
+ Incl5 = sofs:to_external(Incl4),
+ Incl = [Obj || {_,Obj} <- Incl5],
+ {Incl,OSI};
+check_object_set({object,_,_}=Obj0, OSI) ->
+ #osi{st=S,classref=ClassRef} = OSI,
+ #'Object'{def=Def} =
+ check_object(S, #typedef{typespec=Obj0},
+ #'Object'{classname=ClassRef,def=Obj0}),
+ ObjList = check_object_set_mk(Def, OSI),
+ {ObjList,OSI};
+check_object_set(#'ObjectClassFieldType'{classname=ObjName,
+ fieldname=FieldNames},
+ #osi{st=S}=OSI) ->
+ Set = check_ObjectSetFromObjects(S, ObjName, FieldNames),
+ check_object_set_objset_list(Set, OSI);
+check_object_set({'ObjectSetFromObjects',Obj,FieldNames}, #osi{st=S}=OSI) ->
+ ObjName = element(tuple_size(Obj), Obj),
+ Set = check_ObjectSetFromObjects(S, ObjName, FieldNames),
+ check_object_set_objset_list(Set, OSI);
+check_object_set({pt,DefinedObjSet,ParamList0}, OSI) ->
+ #osi{st=S,classref=ClassRef} = OSI,
+ {_,PObjSetDef} = get_referenced_type(S, DefinedObjSet),
+ ParamList = match_parameters(S, ParamList0),
+ ObjectSet = instantiate_pos(S, ClassRef, PObjSetDef, ParamList),
+ check_object_set_objset(ObjectSet, OSI);
+check_object_set({pos,{objectset,_,DefinedObjSet},Params0}, OSI) ->
+ #osi{st=S,classref=ClassRef} = OSI,
+ {_,PObjSetDef} = get_referenced_type(S, DefinedObjSet),
+ Params = match_parameters(S, Params0),
+ ObjectSet = instantiate_pos(S, ClassRef, PObjSetDef, Params),
+ check_object_set_objset(ObjectSet, OSI);
+check_object_set({pv,{simpledefinedvalue,DefinedObject},Params}=PV, OSI) ->
+ #osi{st=S,classref=ClassRef} = OSI,
+ Args = match_parameters(S, Params),
+ #'Object'{def=Def} =
+ check_object(S, PV,
+ #'Object'{classname=ClassRef,
+ def={po,{object,DefinedObject},Args}}),
+ ObjList = check_object_set_mk(Def, OSI),
+ {ObjList,OSI};
+check_object_set({'SingleValue',Val}, OSI) ->
+ check_object_set(Val, OSI);
+check_object_set({'ValueFromObject',{object,Object},FieldNames}, OSI) ->
+ #osi{st=S} = OSI,
+ case extract_field(S, Object, FieldNames) of
+ #'Object'{def=Def} ->
+ ObjList = check_object_set_mk(Def, OSI),
+ {ObjList,OSI};
+ _ ->
+ asn1_error(S, illegal_object)
+ end;
+check_object_set(#type{def=Def}, OSI) ->
+ check_object_set(Def, OSI);
+check_object_set({union,A0,B0}, OSI0) ->
+ {A,OSI1} = check_object_set(A0, OSI0),
+ {B,OSI} = check_object_set(B0, OSI1),
+ {A++B,OSI}.
+
+check_object_set_list([H|T], OSI0) ->
+ {Set0,OSI1} = check_object_set(H, OSI0),
+ {Set1,OSI2} = check_object_set_list(T, OSI1),
+ {Set0++Set1,OSI2};
+check_object_set_list([], OSI) ->
+ {[],OSI}.
+
+check_object_set_objset(#'ObjectSet'{set=Set}, OSI) ->
+ check_object_set_objset_list(Set, OSI).
+
+check_object_set_objset_list(Set, OSI) ->
+ check_object_set_objset_list_1(Set, OSI, []).
+
+check_object_set_objset_list_1(['EXTENSIONMARK'|T], OSI, Acc) ->
+ check_object_set_objset_list_1(T, OSI#osi{ext=true}, Acc);
+check_object_set_objset_list_1([H|T], OSI, Acc) ->
+ check_object_set_objset_list_1(T, OSI, [H|Acc]);
+check_object_set_objset_list_1([], OSI, Acc) ->
+ {Acc,OSI}.
+
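+%% check_object_set_mk(Def, #osi{}) -> [Object]
+%% check_object_set_mk(Module, Name, Def, #osi{}) -> [Object]
+%%   Create the internal representation of an object:
+%%   {{Module,Name},UniqueValue,Fields}. UniqueValue is no_unique_value
+%%   when the class has no UNIQUE field or the object does not set it.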
+check_object_set_mk(Fields, OSI) ->
+ check_object_set_mk(no_mod, no_name, Fields, OSI).
+
+check_object_set_mk(M, N, Def, #osi{uniq={unique,undefined}}) ->
+ {_,_,Fields} = Def,
+ [{{M,N},no_unique_value,Fields}];
+check_object_set_mk(M, N, Def, #osi{uniq={UniqField,_}}) ->
+ {_,_,Fields} = Def,
+ case lists:keyfind(UniqField, 1, Fields) of
+ {UniqField,#valuedef{value=Val}} ->
+ [{{M,N},Val,Fields}];
+ false ->
+ case Fields of
+ [{_,#typedef{typespec=#'ObjectSet'{set=['EXTENSIONMARK']}}}] ->
+ %% FIXME: If object is missing the unique field and
+ %% only contains a reference to an empty object set,
+ %% we will remove the entire object as a workaround
+ %% to get X420 to compile. There should be a better
+ %% way.
+ [];
+ _ ->
+ [{{M,N},no_unique_value,Fields}]
+ end
+ end.
%% remove_duplicate_objects/2 removes duplicate objects.
%% For instance, Set may contain objects of the same class from
%% different object sets that are in fact duplicates.
-remove_duplicate_objects(Set) when is_list(Set) ->
- Pred = fun({A,B,_},{A,C,_}) when B =< C -> true;
- ({A,_,_},{B,_,_}) when A < B -> true;
- ('EXTENSIONMARK','EXTENSIONMARK') -> true;
- (T,A) when is_tuple(T),is_atom(A) -> true;% EXTENSIONMARK last in list
- (_,_) -> false
- end,
- lists:usort(Pred,Set).
+remove_duplicate_objects(S, Set0) when is_list(Set0) ->
+ Set1 = [{Id,Orig} || {_,Id,_}=Orig <- Set0],
+ Set2 = sofs:relation(Set1),
+ Set3 = sofs:relation_to_family(Set2),
+ Set = sofs:to_external(Set3),
+ remove_duplicate_objects_1(S, Set).
+
+remove_duplicate_objects_1(S, [{no_unique_value,Objs}|T]) ->
+ Objs ++ remove_duplicate_objects_1(S, T);
+remove_duplicate_objects_1(S, [{_,[_]=Objs}|T]) ->
+ Objs ++ remove_duplicate_objects_1(S, T);
+remove_duplicate_objects_1(S, [{Id,[_|_]=Objs}|T]) ->
+ MakeSortable = fun(What) -> sortable_type(S, What) end,
+ Tagged = order_tag_set(Objs, MakeSortable),
+ case lists:ukeysort(1, Tagged) of
+ [{_,Obj}] ->
+ [Obj|remove_duplicate_objects_1(S, T)];
+ [_|_] ->
+ asn1_error(S, {non_unique_object,Id})
+ end;
+remove_duplicate_objects_1(_, []) ->
+ [].
-%%
-extensionmark(L,true) ->
- case lists:member('EXTENSIONMARK',L) of
- true -> L;
- _ -> L ++ ['EXTENSIONMARK']
+order_tag_set([{_, _, Fields}=Orig|Fs], Fun) ->
+ Pair = {[{FId, traverse(F, Fun)} || {FId, F} <- Fields], Orig},
+ [Pair|order_tag_set(Fs, Fun)];
+order_tag_set([], _) -> [].
+
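+%% sortable_type(S, Type) -> CanonicalType
+%%   Convert a type to a canonical form for comparison: external
+%%   references are resolved (when possible) and position/name
+%%   information is cleared, so that structurally equal definitions
+%%   compare equal when looking for duplicate objects.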
+sortable_type(S, #'Externaltypereference'{}=ERef) ->
+ try get_referenced_type(S, ERef) of
+ {_,#typedef{}=OI} ->
+ OI#typedef{pos=undefined,name=undefined}
+ catch
+ _:_ ->
+ ERef
end;
-extensionmark(L,_) ->
- L.
+sortable_type(_, #typedef{}=TD) ->
+ asn1ct:unset_pos_mod(TD#typedef{name=undefined});
+sortable_type(_, Type) ->
+ asn1ct:unset_pos_mod(Type).
+
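+%% traverse(Structure, Fun) -> Structure'
+%%   Apply Fun to Structure and recursively to the type definitions
+%%   nested within it (SEQUENCE, SET, CHOICE, SEQUENCE OF, and SET OF
+%%   components).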
+traverse(Structure0, Fun) ->
+ Structure = Fun(Structure0),
+ traverse_1(Structure, Fun).
+
+traverse_1(#typedef{typespec=TS0} = TD, Fun) ->
+ TS = traverse(TS0, Fun),
+ TD#typedef{typespec=TS};
+traverse_1(#valuedef{type=TS0} = VD, Fun) ->
+ TS = traverse(TS0, Fun),
+ VD#valuedef{type=TS};
+traverse_1(#type{def=TS0} = TD, Fun) ->
+ TS = traverse(TS0, Fun),
+ TD#type{def=TS};
+traverse_1(#'SEQUENCE'{components=Cs0} = Seq, Fun) ->
+ Cs = traverse_seq_set(Cs0, Fun),
+ Seq#'SEQUENCE'{components=Cs};
+traverse_1({'SEQUENCE OF',Type0}, Fun) ->
+ Type = traverse(Type0, Fun),
+ {'SEQUENCE OF',Type};
+traverse_1({'SET OF',Type0}, Fun) ->
+ Type = traverse(Type0, Fun),
+ {'SET OF',Type};
+traverse_1(#'SET'{components=Cs0} = Set, Fun) ->
+ Cs = traverse_seq_set(Cs0, Fun),
+ Set#'SET'{components=Cs};
+traverse_1({'CHOICE', Cs0}, Fun) ->
+ Cs = traverse_seq_set(Cs0, Fun),
+ {'CHOICE', Cs};
+traverse_1(Leaf, _) ->
+ Leaf.
+
+traverse_seq_set(List, Fun) when is_list(List) ->
+ traverse_seq_set_1(List, Fun);
+traverse_seq_set({Set, Ext}, Fun) ->
+ {traverse_seq_set_1(Set, Fun), traverse_seq_set_1(Ext, Fun)};
+traverse_seq_set({Set1, Set2, Set3}, Fun) ->
+ {traverse_seq_set_1(Set1, Fun),
+ traverse_seq_set_1(Set2, Fun),
+ traverse_seq_set_1(Set3, Fun)}.
+
+traverse_seq_set_1([#'ComponentType'{} = CT0|Cs], Fun) ->
+ CT = #'ComponentType'{typespec=TS0} = Fun(CT0),
+ TS = traverse(TS0, Fun),
+ [CT#'ComponentType'{typespec=TS}|traverse_seq_set_1(Cs, Fun)];
+traverse_seq_set_1([{'COMPONENTS OF', _} = CO0|Cs], Fun) ->
+ {'COMPONENTS OF', TS0} = Fun(CO0),
+ TS = traverse(TS0, Fun),
+ [{'COMPONENTS OF', TS}|traverse_seq_set_1(Cs, Fun)];
+traverse_seq_set_1([], _) ->
+ [].
-object_to_check(#typedef{typespec=ObjDef}) ->
+object_to_check(_, #typedef{typespec=ObjDef}) ->
ObjDef;
-object_to_check(#valuedef{type=ClassName,value=ObjectRef}) ->
+object_to_check(S, #valuedef{type=Class,value=ObjectRef}) ->
%% If the object definition is parsed as an object the ClassName
- %% is parsed as a type
- #'Object'{classname=ClassName#type.def,def=ObjectRef}.
-
-prepare_objset({'SingleValue',Set}) when is_list(Set) ->
- {set,Set,false};
-prepare_objset(L=['EXTENSIONMARK']) ->
- L;
-prepare_objset(Set) when is_list(Set) ->
- {set,Set,false};
-prepare_objset({{'SingleValue',Set},Ext}) ->
- {set,merge_sets(Set,Ext),true};
-%%prepare_objset({Set,Ext}) when is_list(Set),is_list(Ext) ->
-%% {set,lists:append([Set,Ext]),true};
-prepare_objset({Set,Ext}) when is_list(Set) ->
- {set,merge_sets(Set,Ext),true};
-prepare_objset({{object,definedsyntax,_ObjFields}=Set,Ext}) ->
- {set,merge_sets(Set, Ext),true};
-prepare_objset(ObjDef={object,definedsyntax,_ObjFields}) ->
- {set,[ObjDef],false};
-prepare_objset({ObjDef=#type{},Ext}) when is_list(Ext) ->
- {set,[ObjDef|Ext],true};
-prepare_objset({#type{}=Type,#type{}=Ext}) ->
- {set,[Type,Ext],true};
-prepare_objset(Ret) ->
- Ret.
-
-class_fields_optional_check(S,#classdef{typespec=ClassSpec}) ->
- Fields = ClassSpec#objectclass.fields,
- class_fields_optional_check1(S,Fields).
-
-class_fields_optional_check1(_S,[]) ->
- ok;
-class_fields_optional_check1(S,[{typefield,_,'OPTIONAL'}|Rest]) ->
- class_fields_optional_check1(S,Rest);
-class_fields_optional_check1(S,[{fixedtypevaluefield,_,_,_,'OPTIONAL'}|Rest]) ->
- class_fields_optional_check1(S,Rest);
-class_fields_optional_check1(S,[{fixedtypevaluesetfield,_,_,'OPTIONAL'}|Rest]) ->
- class_fields_optional_check1(S,Rest);
-class_fields_optional_check1(S,[{objectfield,_,_,_,'OPTIONAL'}|Rest]) ->
- class_fields_optional_check1(S,Rest);
-class_fields_optional_check1(S,[{objectsetfield,_,_,'OPTIONAL'}|Rest]) ->
- class_fields_optional_check1(S,Rest).
-
-%% ObjectSetFromObjects functionality
-
-%% The fieldname is a list of field names.They may be objects or
-%% object sets. If ObjectSet is an object set the resulting object set
-%% is the union of object sets if the last field name is an object
-%% set. If the last field is an object the resulting object set is
-%% the set of objects in ObjectSet.
-object_set_from_objects(S,RefedObjMod,FieldName,ObjectSet) ->
- object_set_from_objects(S,RefedObjMod,FieldName,ObjectSet,[]).
-object_set_from_objects(S,RefedObjMod,FieldName,ObjectSet,InterSect)
- when is_record(ObjectSet,'ObjectSet') ->
- #'ObjectSet'{class=Cl,set=Set} = ObjectSet,
- {_,ClassDef} = get_referenced_type(S,Cl),
- object_set_from_objects(S,RefedObjMod,ClassDef,FieldName,Set,InterSect,[]);
-object_set_from_objects(S,RefedObjMod,FieldName,Object,InterSect)
- when is_record(Object,'Object') ->
- #'Object'{classname=Cl,def=Def}=Object,
- object_set_from_objects(S,RefedObjMod,Cl,FieldName,[Def],InterSect,[]).
-object_set_from_objects(S,RefedObjMod,ClassDef,FieldName,['EXTENSIONMARK'|Os],
- InterSect,Acc) ->
- object_set_from_objects(S,RefedObjMod,ClassDef,FieldName,Os,InterSect,%%Acc);
- ['EXTENSIONMARK'|Acc]);
-object_set_from_objects(S,RefedObjMod,ClassDef,FieldName,[O|Os],InterSect,Acc) ->
- case object_set_from_objects2(S,mod_of_obj(RefedObjMod,element(1,O)),
- ClassDef,FieldName,element(3,O),InterSect) of
- ObjS when is_list(ObjS) ->
- object_set_from_objects(S,RefedObjMod,ClassDef,FieldName,Os,InterSect,ObjS++Acc);
- Obj ->
- object_set_from_objects(S,RefedObjMod,ClassDef,FieldName,Os,InterSect,[Obj|Acc])
- end;
-object_set_from_objects(_S,_RefedObjMod,_ClassDef,_FieldName,[],InterSect,Acc) ->
- %% For instance may Acc contain objects of same class from
- %% different object sets that in fact might be duplicates.
- remove_duplicate_objects(osfo_intersection(InterSect,Acc)).
-%% Acc.
-object_set_from_objects2(S,RefedObjMod,ClassDef,[{valuefieldreference,OName}],
- Fields,_InterSect) ->
- %% this is an object
- case lists:keysearch(OName,1,Fields) of
- {value,{_,TDef}} ->
- mk_object_set_from_object(S,RefedObjMod,TDef,ClassDef);
- _ ->
- [] % it may be an absent optional field
- end;
-object_set_from_objects2(S,RefedObjMod,ClassDef,[{typefieldreference,OSName}],
- Fields,_InterSect) ->
- %% this is an object set
- case lists:keysearch(OSName,1,Fields) of
- {value,{_,TDef}} ->
- case TDef#typedef.typespec of
- #'ObjectSet'{class=_NextClName,set=NextSet} ->%% = TDef#typedef.typespec,
- NextSet;
- #'Object'{def=_ObjDef} ->
- mk_object_set_from_object(S,RefedObjMod,TDef,ClassDef)
-%% ObjDef
- %% error({error,{internal,unexpected_object,TDef}})
- end;
- _ ->
- [] % it may be an absent optional field
- end;
-object_set_from_objects2(S,RefedObjMod,_ClassDef,[{valuefieldreference,OName}|Rest],
- Fields,InterSect) ->
- %% this is an object
- case lists:keysearch(OName,1,Fields) of
- {value,{_,TDef}} ->
- #'Object'{classname=NextClName,def=ODef}=TDef#typedef.typespec,
- {_,_,NextFields}=ODef,
- {_,NextClass} = get_referenced_type(S,NextClName),
- object_set_from_objects2(S,RefedObjMod,NextClass,Rest,NextFields,InterSect);
- _ ->
- []
- end;
-object_set_from_objects2(S,RefedObjMod,_ClassDef,[{typefieldreference,OSName}|Rest],
- Fields,InterSect) ->
- %% this is an object set
- Next = {NextClName,NextSet} =
- case lists:keysearch(OSName,1,Fields) of
- {value,{_,TDef}} when is_record(TDef,'ObjectSet') ->
- #'ObjectSet'{class=NextClN,set=NextS} = TDef,
- {NextClN,NextS};
- {value,{_,#typedef{typespec=OS}}} ->
- %% objectsets in defined syntax will come here as typedef{}
- %% #'ObjectSet'{class=NextClN,set=NextS} = OS,
- case OS of
- #'ObjectSet'{class=NextClN,set=NextS} ->
- {NextClN,NextS};
- #'Object'{classname=NextClN,def=NextDef} ->
- {NextClN,[NextDef]}
- end;
+ %% is parsed as a type.
+ case Class of
+ #type{def=#'Externaltypereference'{}=Def} ->
+ #'Object'{classname=Def,def=ObjectRef};
_ ->
- {[],[]}
- end,
- case Next of
- {[],[]} ->
- [];
- _ ->
- {_,NextClass} = get_referenced_type(S,NextClName),
- object_set_from_objects(S,RefedObjMod,NextClass,Rest,NextSet,InterSect,[])
- end.
-
-mk_object_set_from_object(S,RefedObjMod,TDef,Class) ->
- #'Object'{classname=_NextClName,def=ODef} = TDef#typedef.typespec,
- {_,_,NextFields}=ODef,
-
- UniqueFieldName =
- case (catch get_unique_fieldname(S,Class)) of
- {error,'__undefined_',_} -> {unique,undefined};
- {asn1,Msg,_} -> error({class,Msg,S});
- {'EXIT',Msg} -> error({class,{internal_error,Msg},S});
- {Other,_} -> Other
- end,
- VDef = get_unique_value(S,NextFields,UniqueFieldName),
- %% XXXXXXXXXXX
- case VDef of
- [] ->
- ['EXTENSIONMARK'];
- _ ->
- {{RefedObjMod,get_datastr_name(TDef)},VDef,NextFields}
+ asn1_error(S, illegal_object)
end.
-
-
-mod_of_obj(_RefedObjMod,{NewMod,ObjName})
- when is_atom(NewMod),is_atom(ObjName) ->
- NewMod;
-mod_of_obj(RefedObjMod,_) ->
- RefedObjMod.
-
-
-merge_sets(Root,{'SingleValue',Ext}) ->
- merge_sets(Root,Ext);
-merge_sets(Root,Ext) when is_list(Root),is_list(Ext) ->
- Root ++ Ext;
-merge_sets(Root,Ext) when is_list(Ext) ->
- [Root|Ext];
-merge_sets(Root,Ext) when is_list(Root) ->
- Root++[Ext];
-merge_sets(Root,Ext) ->
- [Root]++[Ext].
-
-reduce_objectset(ObjectSet,Exclusion) ->
- case Exclusion of
- {'SingleValue',#'Externalvaluereference'{value=Name}} ->
- case lists:keysearch(Name,1,ObjectSet) of
- {value,El} ->
- lists:subtract(ObjectSet,[El]);
- _ ->
- ObjectSet
- end
- end.
-
-%% Checks a list of objects or object sets and returns a list of selected
-%% information for the code generation.
-check_object_list(S,ClassRef,ObjectList) ->
- check_object_list(S,ClassRef,ObjectList,[]).
-
-check_object_list(S,ClassRef,[ObjOrSet|Objs],Acc) ->
- ?dbg("check_object_list: ~p~n",[ObjOrSet]),
- case ObjOrSet of
- ObjDef when is_tuple(ObjDef),(element(1,ObjDef)==object) ->
- Def =
- check_object(S,#typedef{typespec=ObjDef},
-% #'Object'{classname={objectclassname,ClassRef},
- #'Object'{classname=ClassRef,
- def=ObjDef}),
- check_object_list(S,ClassRef,Objs,[{{no_mod,no_name},Def#'Object'.def}|Acc]);
- {'SingleValue',Ref = #'Externalvaluereference'{}} ->
- ?dbg("{SingleValue,Externalvaluereference}~n",[]),
- {RefedMod,ObjName,
- #'Object'{def=Def}} = check_referenced_object(S,Ref),
- check_object_list(S,ClassRef,Objs,[{{RefedMod,ObjName},Def}|Acc]);
- ObjRef when is_record(ObjRef,'Externalvaluereference') ->
- ?dbg("Externalvaluereference~n",[]),
- {RefedMod,ObjName,
- #'Object'{def=Def}} = check_referenced_object(S,ObjRef),
- check_object_list(S,ClassRef,Objs,[{{RefedMod,ObjName},Def}|Acc]);
- {'ValueFromObject',{_,Object},FieldName} ->
- {_,Def} = get_referenced_type(S,Object),
- TypeDef = get_fieldname_element(S,Def,FieldName),
- (TypeDef#typedef.typespec)#'ObjectSet'.set;
- ObjSet when is_record(ObjSet,type) ->
- ObjSetDef =
- case ObjSet#type.def of
- Ref when is_record(Ref,'Externaltypereference') ->
- {_,D} = get_referenced_type(S,ObjSet#type.def),
- D;
- Other ->
- throw({asn1_error,{'unknown objecset',Other,S}})
- end,
- #'ObjectSet'{set=ObjectsInSet} =
- check_object(S,ObjSetDef,ObjSetDef#typedef.typespec),
- AccList = transform_set_to_object_list(ObjectsInSet,[]),
- check_object_list(S,ClassRef,Objs,AccList++Acc);
- union ->
- check_object_list(S,ClassRef,Objs,Acc);
- {pos,{objectset,_,DefinedObjectSet},Params} ->
- OSDef = #type{def={pt,DefinedObjectSet,Params}},
- #'ObjectSet'{set=Set} =
- check_object(S,ObjOrSet,#'ObjectSet'{class=ClassRef,
- set=OSDef}),
- check_object_list(S,ClassRef,Objs,Set ++ Acc);
- {pv,{simpledefinedvalue,DefinedObject},Params} ->
- Args = [match_parameters(S,Param,S#state.parameters)||
- Param<-Params],
- #'Object'{def=Def} =
- check_object(S,ObjOrSet,
- #'Object'{classname=ClassRef ,
- def={po,{object,DefinedObject},
- Args}}),
- check_object_list(S,ClassRef,Objs,[{{no_mod,no_name},Def}|Acc]);
- {'ObjectSetFromObjects',Os,FieldName} when is_tuple(Os) ->
- NewSet =
- check_ObjectSetFromObjects(S, element(tuple_size(Os), Os),
- FieldName,[]),
- check_object_list(S,ClassRef,Objs,NewSet++Acc);
- {{'ObjectSetFromObjects',Os,FieldName},InterSection}
- when is_tuple(Os) ->
- NewSet =
- check_ObjectSetFromObjects(S, element(tuple_size(Os), Os),
- FieldName,InterSection),
- check_object_list(S,ClassRef,Objs,NewSet++Acc);
- Other ->
- exit({error,{'unknown object',Other},S})
- end;
-%% Finally reverse the accumulated list and if there are any extension
-%% marks in the object set put one indicator of that in the end of the
-%% list.
-check_object_list(_,_,[],Acc) ->
- lists:reverse(Acc).
check_referenced_object(S,ObjRef)
when is_record(ObjRef,'Externalvaluereference')->
@@ -1213,195 +1002,134 @@ check_referenced_object(S,ObjRef)
check_object(update_state(S,RefedMod),ObjectDef,ObjectDef#typedef.typespec)}
end.
-check_ObjectSetFromObjects(S,ObjName,FieldName,InterSection) ->
- {RefedMod,TDef} = get_referenced_type(S,ObjName),
- ObjOrSet = check_object(update_state(S,RefedMod),TDef,TDef#typedef.typespec),
- InterSec = prepare_intersection(S,InterSection),
- _NewSet = object_set_from_objects(S,RefedMod,FieldName,ObjOrSet,InterSec).
+check_ObjectSetFromObjects(S, ObjName, Fields) ->
+ {_,Obj0} = get_referenced_type(S, ObjName),
+ case check_object(S, Obj0, Obj0#typedef.typespec) of
+ #'ObjectSet'{}=Obj1 ->
+ get_fieldname_set(S, Obj1, Fields);
+ #'Object'{classname=Class,
+ def={object,_,ObjFs}} ->
+ ObjSet = #'ObjectSet'{class=Class,
+ set=[{'_','_',ObjFs}]},
+ get_fieldname_set(S, ObjSet, Fields)
+ end.
-prepare_intersection(_S,[]) ->
- [];
-prepare_intersection(S,{'EXCEPT',ObjRef}) ->
- except_names(S,ObjRef);
-prepare_intersection(_S,T) ->
- exit({error,{internal_error,not_implemented,object_set_from_objects,T}}).
-except_names(_S,{'SingleValue',#'Externalvaluereference'{value=ObjName}}) ->
- [{except,ObjName}];
-except_names(_,T) ->
- exit({error,{internal_error,not_implemented,object_set_from_objects,T}}).
-
-osfo_intersection(InterSect,ObjList) ->
- Res = [X|| X = {{_,N},_,_} <- ObjList,
- lists:member({except,N},InterSect) == false],
- case lists:member('EXTENSIONMARK',ObjList) of
- true ->
- Res ++ ['EXTENSIONMARK'];
+%% get_type_from_object(State, ObjectOrObjectSet, [{RefType,FieldName}]) ->
+%% Type
+get_type_from_object(S, Object, FieldNames)
+ when is_record(Object, 'Externaltypereference');
+ is_record(Object, 'Externalvaluereference') ->
+ extract_field(S, Object, FieldNames).
+
+%% get_value_from_object(State, ObjectOrObjectSet, [{RefType,FieldName}]) ->
+%% UntaggedValue
+get_value_from_object(S, Def, FieldNames) ->
+ case extract_field(S, Def, FieldNames) of
+ #valuedef{value=Val} ->
+ Val;
+ {valueset,_}=Val ->
+ Val;
_ ->
- Res
+ asn1_error(S, illegal_value)
end.
-%% get_fieldname_element/3
-%% gets the type/value/object/... of the referenced element in FieldName
-%% FieldName is a list and may have more than one element.
-%% Each element in FieldName can be either {typefieldreference,AnyFieldName}
-%% or {valuefieldreference,AnyFieldName}
-%% Def is the def of the first object referenced by FieldName
-get_fieldname_element(S,Def,[{_RefType,FieldName}]) when is_record(Def,typedef) ->
- {_,_,ObjComps} = (Def#typedef.typespec)#'Object'.def,
- check_fieldname_element(S,lists:keysearch(FieldName,1,ObjComps));
-get_fieldname_element(S,Def,[{_RefType,FieldName}|Rest])
- when is_record(Def,typedef) ->
- %% As FieldName is followd by other FieldNames it has to be an
- %% object or objectset.
- {_,_,ObjComps} = (Def#typedef.typespec)#'Object'.def,
- NewDef = check_fieldname_element(S,lists:keysearch(FieldName,1,ObjComps)),
- ObjDef = fun(#'Object'{def=D}) -> D;
- (#'ObjectSet'{set=Set}) -> Set
- end
- (NewDef),
- case ObjDef of
+%% extract_field(State, ObjectOrObjectSet, [{RefType,FieldName}])
+%% RefType = typefieldreference | valuefieldreference
+%%
+%% Get the type, value, object, object set, or value set from the
+%% referenced object or object set. The list of field name tuples
+%% may have more than one element. All field names but the last
+%% refer to either an object or an object set.
+
+extract_field(S, Def0, FieldNames) ->
+ {_,Def1} = get_referenced_type(S, Def0),
+ Def2 = check_object(S, Def1, Def1#typedef.typespec),
+ Def = Def1#typedef{typespec=Def2},
+ get_fieldname_element(S, Def, FieldNames).
+
+%% get_fieldname_element(State, Element, [{RefType,FieldName}])
+%% RefType = typefieldreference | valuefieldreference
+%%
+%% Get the type, value, object, object set, or value set from the referenced
+%% element. The list of field name tuples may have more than one element.
+%% All field names but the last refer to either an object or an object set.
+
+get_fieldname_element(S, Object0, [{_RefType,FieldName}|Fields]) ->
+ Object = case Object0 of
+ #typedef{typespec=#'Object'{def=Obj}} -> Obj;
+ {_,_,_}=Obj -> Obj
+ end,
+ case check_fieldname_element(S, FieldName, Object) of
+ #'Object'{def=D} when Fields =/= [] ->
+ get_fieldname_element(S, D, Fields);
+ #'ObjectSet'{}=Set ->
+ get_fieldname_set(S, Set, Fields);
+ Result when Fields =:= [] ->
+ Result
+ end;
+get_fieldname_element(_S, Def, []) ->
+ Def.
+
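+%% get_fieldname_set(S, ObjectSet, [{RefType,FieldName}]) -> Set
+%%   Apply the remaining field names to every object in the object
+%%   set, collecting the extracted elements. Extension marks are kept;
+%%   a list of value definitions is returned as {valueset,Values}.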
+get_fieldname_set(S, #'ObjectSet'{set=Set0}, T) ->
+ get_fieldname_set_1(S, Set0, T, []).
+
+get_fieldname_set_1(S, ['EXTENSIONMARK'=Ext|T], Fields, Acc) ->
+ get_fieldname_set_1(S, T, Fields, [Ext|Acc]);
+get_fieldname_set_1(S, [H|T], Fields, Acc) ->
+ try get_fieldname_element(S, H, Fields) of
L when is_list(L) ->
- [get_fieldname_element(S,X,Rest) || X <- L];
- _ ->
- get_fieldname_element(S,ObjDef,Rest)
+ get_fieldname_set_1(S, T, Fields, L++Acc);
+ {valueset,L} ->
+ get_fieldname_set_1(S, T, Fields, L++Acc);
+ Other ->
+ get_fieldname_set_1(S, T, Fields, [Other|Acc])
+ catch
+ throw:{error,_} ->
+ get_fieldname_set_1(S, T, Fields, Acc)
end;
-get_fieldname_element(S,{object,_,Fields},[{_RefType,FieldName}|Rest]) ->
- NewDef = check_fieldname_element(S,lists:keysearch(FieldName,1,Fields)),
- get_fieldname_element(S,NewDef,Rest);
-get_fieldname_element(_S,Def,[]) ->
- Def;
-get_fieldname_element(_S,Def,[{_RefType,_FieldName}|_RestFName])
- when is_record(Def,typedef) ->
- ok.
+get_fieldname_set_1(_, [], _Fields, Acc) ->
+ case Acc of
+ [#valuedef{}|_] ->
+ {valueset,Acc};
+ _ ->
+ Acc
+ end.
-check_fieldname_element(S,{value,{_,Def}}) ->
- check_fieldname_element(S,Def);
-check_fieldname_element(S, #typedef{typespec=Ts}=TDef) ->
+check_fieldname_element(S, Name, {_,_,Fields}) ->
+ case lists:keyfind(Name, 1, Fields) of
+ {Name,Def} ->
+ check_fieldname_element_1(S, Def);
+ false ->
+ asn1_error(S, {undefined_field,Name})
+ end.
+
+check_fieldname_element_1(S, #typedef{typespec=Ts}=TDef) ->
case Ts of
#'Object'{} ->
check_object(S, TDef, Ts);
_ ->
check_type(S, TDef, Ts)
end;
-check_fieldname_element(S, #valuedef{}=VDef) ->
+check_fieldname_element_1(S, #valuedef{}=VDef) ->
try
check_value(S, VDef)
catch
- throw:{objectdef} ->
+ throw:{asn1_class, _} ->
#valuedef{checked=C,pos=Pos,name=N,type=Type,
value=Def} = VDef,
ClassName = Type#type.def,
NewSpec = #'Object'{classname=ClassName,def=Def},
NewDef = #typedef{checked=C,pos=Pos,name=N,typespec=NewSpec},
- check_fieldname_element(S, NewDef)
+ check_fieldname_element_1(S, NewDef)
end;
-check_fieldname_element(S,Eref)
- when is_record(Eref,'Externaltypereference');
- is_record(Eref,'Externalvaluereference') ->
- {_,TDef}=get_referenced_type(S,Eref),
- check_fieldname_element(S,TDef);
-check_fieldname_element(S,Other) ->
- throw({error,{assigned_object_error,"not_assigned_object",Other,S}}).
+check_fieldname_element_1(_S, {value_tag,Val}) ->
+ #valuedef{value=Val};
+check_fieldname_element_1(S, Eref)
+ when is_record(Eref, 'Externaltypereference');
+ is_record(Eref, 'Externalvaluereference') ->
+ {_,TDef} = get_referenced_type(S, Eref),
+ check_fieldname_element_1(S, TDef).
-transform_set_to_object_list([{Name,_UVal,Fields}|Objs],Acc) ->
- transform_set_to_object_list(Objs,[{Name,{object,generatesyntax,Fields}}|Acc]);
-transform_set_to_object_list(['EXTENSIONMARK'|Objs],Acc) ->
-%% transform_set_to_object_list(Objs,['EXTENSIONMARK'|Acc]);
- transform_set_to_object_list(Objs,Acc);
-transform_set_to_object_list([],Acc) ->
- Acc.
-
-get_unique_valuelist(_S,ObjSet,{unique,undefined}) -> % no unique field in object
- lists:map(fun({N,{_,_,F}})->{N,no_unique_value,F};
- (V={_,_,_}) ->V;
- ({A,B}) -> {A,no_unique_value,B}
- end, ObjSet);
-get_unique_valuelist(S,ObjSet,{UFN,Opt}) ->
- get_unique_vlist(S,ObjSet,UFN,Opt,[]).
-
-
-get_unique_vlist(_S,[],_,_,[]) ->
- ['EXTENSIONMARK'];
-get_unique_vlist(S,[],_,Opt,Acc) ->
- case catch check_uniqueness(remove_duplicate_objects(Acc)) of
- {asn1_error,_} when Opt =/= 'OPTIONAL' ->
- error({'ObjectSet',"not unique objects in object set",S});
- {asn1_error,_} ->
- lists:reverse(Acc);
- _ ->
- lists:reverse(Acc)
- end;
-get_unique_vlist(S,['EXTENSIONMARK'|Rest],UniqueFieldName,Opt,Acc) ->
- get_unique_vlist(S,Rest,UniqueFieldName,Opt,Acc);
-get_unique_vlist(S,[{ObjName,Obj}|Rest],UniqueFieldName,Opt,Acc) ->
- {_,_,Fields} = Obj,
- NewObjInf =
- case get_unique_value(S,Fields,UniqueFieldName) of
- #valuedef{value=V} -> [{ObjName,V,Fields}];
- [] -> []; % maybe the object only was a reference to an
- % empty object set.
- no_unique_value -> [{ObjName,no_unique_value,Fields}]
- end,
- get_unique_vlist(S,Rest,UniqueFieldName,Opt,NewObjInf++Acc);
-
-get_unique_vlist(S,[V={_,_,_}|Rest],UniqueFieldName,Opt,Acc) ->
- get_unique_vlist(S,Rest,UniqueFieldName,Opt,[V|Acc]).
-
-get_unique_value(S,Fields,UniqueFieldName) ->
- Module = S#state.mname,
- case lists:keysearch(UniqueFieldName,1,Fields) of
- {value,Field} ->
- case element(2,Field) of
- VDef when is_record(VDef,valuedef) ->
- VDef;
- {'ValueFromObject',Object,Name} ->
- case Object of
- {object,Ext} when is_record(Ext,'Externaltypereference') ->
- OtherModule = Ext#'Externaltypereference'.module,
- ExtObjName = Ext#'Externaltypereference'.type,
- ObjDef = asn1_db:dbget(OtherModule,ExtObjName),
- ObjSpec = ObjDef#typedef.typespec,
- get_unique_value(OtherModule,element(3,ObjSpec),Name);
- {object,{_,_,ObjName}} ->
- ObjDef = asn1_db:dbget(Module,ObjName),
- ObjSpec = ObjDef#typedef.typespec,
- get_unique_value(Module,element(3,ObjSpec),Name);
- {po,Object,_Params} ->
- exit({error,{'parameterized object not implemented yet',
- Object},S})
- end;
- Value when is_atom(Value);is_number(Value) ->
- #valuedef{value=Value,module=Module};
- {'CHOICE',{C,Value}} when is_atom(C) ->
- %% #valuedef{value=normalize_value(S,element(3,Field),VDef,[])}
- case Value of
- Scalar when is_atom(Scalar);is_number(Scalar) ->
- #valuedef{value=Value,module=Module};
- Eref = #'Externalvaluereference'{} ->
- element(2,get_referenced_type(S,Eref))
- end
- end;
- false ->
- case Fields of
- [{_,#typedef{typespec=#'ObjectSet'{set=['EXTENSIONMARK']}}}] ->
- [];
- _ ->
- no_unique_value
- end
- end.
-
-check_uniqueness(NameValueList) ->
- check_uniqueness1(lists:keysort(2,NameValueList)).
-
-check_uniqueness1([]) ->
- true;
-check_uniqueness1([_]) ->
- true;
-check_uniqueness1([{_,N,_},{_,N,_}|_Rest]) ->
- throw({asn1_error,{'objects in set must have unique values in UNIQUE fields',N}});
-check_uniqueness1([_|Rest]) ->
- check_uniqueness1(Rest).
-
%% instantiate_po/4
%% ClassDef is the class of Object,
%% Object is the Parameterized object, which is referenced,
@@ -1410,8 +1138,7 @@ check_uniqueness1([_|Rest]) ->
instantiate_po(S=#state{parameters=_OldArgs},_ClassDef,Object,ArgsList) when is_record(Object,pobjectdef) ->
FormalParams = get_pt_args(Object),
MatchedArgs = match_args(S,FormalParams,ArgsList,[]),
-% NewS = S#state{type=Object,parameters=MatchedArgs++OldArgs},
- NewS = S#state{type=Object,parameters=MatchedArgs},
+ NewS = S#state{parameters=MatchedArgs},
check_object(NewS,Object,#'Object'{classname=Object#pobjectdef.class,
def=Object#pobjectdef.def}).
@@ -1421,20 +1148,14 @@ instantiate_po(S=#state{parameters=_OldArgs},_ClassDef,Object,ArgsList) when is_
%% on the right side of the assignment,
%% ArgsList is the list of actual parameters, i.e. real objects
instantiate_pos(S=#state{parameters=_OldArgs},ClassRef,ObjectSetDef,ArgsList) ->
-% ClassName = ClassDef#classdef.name,
FormalParams = get_pt_args(ObjectSetDef),
OSet = case get_pt_spec(ObjectSetDef) of
- {valueset,Set} ->
-% #'ObjectSet'{class=name2Extref(S#state.mname,
-% ClassName),set=Set};
- #'ObjectSet'{class=ClassRef,set=Set};
- Set when is_record(Set,'ObjectSet') -> Set;
- _ ->
- error({type,"parameterized object set failure",S})
+ {valueset,Set} -> #'ObjectSet'{class=ClassRef,set=Set};
+ Set when is_record(Set,'ObjectSet') -> Set;
+ _ -> asn1_error(S, invalid_objectset)
end,
MatchedArgs = match_args(S,FormalParams,ArgsList,[]),
-% NewS = S#state{type=ObjectSetDef,parameters=MatchedArgs++OldArgs},
- NewS = S#state{type=ObjectSetDef,parameters=MatchedArgs},
+ NewS = S#state{parameters=MatchedArgs},
check_object(NewS,ObjectSetDef,OSet).
@@ -1468,7 +1189,7 @@ gen_incl1(S,Fields,[C|CFields]) ->
check_object(S,TDef,TDef#typedef.typespec);
ERef ->
{_,T} = get_referenced_type(S,ERef),
- check_object(S,T,object_to_check(T))
+ check_object(S, T, object_to_check(S, T))
end,
case gen_incl(S,ObjDef#'Object'.def,
ClassFields) of
@@ -1485,7 +1206,7 @@ gen_incl1(S,Fields,[C|CFields]) ->
end.
get_objclass_fields(S,Eref=#'Externaltypereference'{}) ->
- {_,ClassDef} = get_referenced_type(S,Eref),
+ {_,ClassDef} = get_referenced_type(S,Eref, true),
get_objclass_fields(S,ClassDef);
get_objclass_fields(S,CD=#classdef{typespec=#'Externaltypereference'{}}) ->
get_objclass_fields(S,CD#classdef.typespec);
@@ -1501,10 +1222,10 @@ gen_incl_set(S,Fields,#typedef{typespec=#type{def=Eref}})
{_,CDef} = get_referenced_type(S,Eref),
gen_incl_set(S,Fields,CDef);
gen_incl_set(S,Fields,ClassDef) ->
- case catch get_unique_fieldname(S,ClassDef) of
- Tuple when tuple_size(Tuple) =:= 3 ->
+ case get_unique_fieldname(S, ClassDef) of
+ no_unique ->
false;
- _ ->
+ {_, _} ->
gen_incl_set1(S,Fields,
(ClassDef#classdef.typespec)#objectclass.fields)
end.
@@ -1529,475 +1250,390 @@ gen_incl_set1(S,[Object|Rest],CFields)->
gen_incl_set1(S,Rest,CFields)
end.
-check_objectdefn(S,Def,CDef) when is_record(CDef,classdef) ->
- WithSyntax = (CDef#classdef.typespec)#objectclass.syntax,
- ClassFields = (CDef#classdef.typespec)#objectclass.fields,
+
+%%%
+%%% Check an object definition.
+%%%
+
+check_objectdefn(S, Def, #classdef{typespec=ObjClass}) ->
+ #objectclass{syntax=Syntax0,fields=ClassFields} = ObjClass,
case Def of
{object,defaultsyntax,Fields} ->
- check_defaultfields(S,Fields,ClassFields);
+ check_defaultfields(S, Fields, ClassFields);
{object,definedsyntax,Fields} ->
- {_,WSSpec} = WithSyntax,
- NewFields =
- case catch( convert_definedsyntax(S,Fields,WSSpec,
- ClassFields,[])) of
- {asn1,{_ErrorType,ObjToken,ClassToken}} ->
- throw({asn1,{'match error in object',ObjToken,
- 'found in object',ClassToken,'found in class'}});
- Err={asn1,_} -> throw(Err);
- Err={'EXIT',_} -> throw(Err);
- DefaultFields when is_list(DefaultFields) ->
- DefaultFields
- end,
- {object,defaultsyntax,NewFields};
- {object,_ObjectId} -> % This is a DefinedObject
- fixa;
- Other ->
- exit({error,{objectdefn,Other}})
+ Syntax = get_syntax(S, Syntax0, ClassFields),
+ case match_syntax(S, Syntax, Fields, []) of
+ {match,NewFields,[]} ->
+ {object,defaultsyntax,NewFields};
+ {match,_,[What|_]} ->
+ syntax_match_error(S, What);
+ {nomatch,[What|_]} ->
+ syntax_match_error(S, What);
+ {nomatch,[]} ->
+ syntax_match_error(S)
+ end
+ end.
+
+
+%%%
+%%% Pre-process the simplified syntax so that it can be more
+%%% easily matched.
+%%%
+
+get_syntax(_, {preprocessed_syntax,Syntax}, _) ->
+ Syntax;
+get_syntax(S, {'WITH SYNTAX',Syntax}, ClassFields) ->
+ preprocess_syntax(S, Syntax, ClassFields).
+
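+%% preprocess_syntax(S, Syntax, ClassFields) -> PreprocessedSyntax
+%%   Rewrite the WITH SYNTAX specification into a list of
+%%   {token,Token}, {field,ClassField}, and {optional,List} items,
+%%   and verify it against the class: duplicated fields, mandatory
+%%   fields inside optional groups, and missing mandatory fields are
+%%   reported as errors.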
+preprocess_syntax(S, Syntax0, Cs) ->
+ Syntax = preprocess_syntax_1(S, Syntax0, Cs, true),
+ Present0 = preprocess_get_fields(Syntax, []),
+ Present1 = lists:sort(Present0),
+ Present = ordsets:from_list(Present1),
+ case Present =:= Present1 of
+ false ->
+ Dupl = Present1 -- Present,
+ asn1_error(S, {syntax_duplicated_fields,Dupl});
+ true ->
+ ok
+ end,
+ Mandatory0 = get_mandatory_class_fields(Cs),
+ Mandatory = ordsets:from_list(Mandatory0),
+ case ordsets:subtract(Mandatory, Present) of
+ [] ->
+ Syntax;
+ [_|_]=Missing ->
+ asn1_error(S, {syntax_missing_mandatory_fields,Missing})
end.
+preprocess_syntax_1(S, [H|T], Cs, Mandatory) when is_list(H) ->
+ [{optional,preprocess_syntax_1(S, H, Cs, false)}|
+ preprocess_syntax_1(S, T, Cs, Mandatory)];
+preprocess_syntax_1(S, [{valuefieldreference,Name}|T], Cs, Mandatory) ->
+ F = preprocess_check_field(S, Name, Cs, Mandatory),
+ [F|preprocess_syntax_1(S, T, Cs, Mandatory)];
+preprocess_syntax_1(S, [{typefieldreference,Name}|T], Cs, Mandatory) ->
+ F = preprocess_check_field(S, Name, Cs, Mandatory),
+ [F|preprocess_syntax_1(S, T, Cs, Mandatory)];
+preprocess_syntax_1(S,[{Token,_}|T], Cs, Mandatory) when is_atom(Token) ->
+ [{token,Token}|preprocess_syntax_1(S, T, Cs, Mandatory)];
+preprocess_syntax_1(S, [Token|T], Cs, Mandatory) when is_atom(Token) ->
+ [{token,Token}|preprocess_syntax_1(S, T, Cs, Mandatory)];
+preprocess_syntax_1(_, [], _, _) -> [].
+
+preprocess_check_field(S, Name, Cs, Mandatory) ->
+ case lists:keyfind(Name, 2, Cs) of
+ Tuple when is_tuple(Tuple) ->
+ case not Mandatory andalso is_mandatory_class_field(Tuple) of
+ true ->
+ asn1_error(S, {syntax_mandatory_in_optional_group,Name});
+ false ->
+ {field,Tuple}
+ end;
+ false ->
+ asn1_error(S, {syntax_undefined_field,Name})
+ end.
+
+preprocess_get_fields([{field,F}|T], Acc) ->
+ Name = element(2, F),
+ preprocess_get_fields(T, [Name|Acc]);
+preprocess_get_fields([{optional,L}|T], Acc) ->
+ preprocess_get_fields(T, preprocess_get_fields(L, Acc));
+preprocess_get_fields([_|T], Acc) ->
+ preprocess_get_fields(T, Acc);
+preprocess_get_fields([], Acc) ->
+ Acc.
+
+%%%
+%%% Match the actual fields in the object definition to
+%%% the pre-processed simplified syntax.
+%%%
+
+match_syntax(S, [{token,Token}|T], [A|As]=Args, Acc) ->
+ case A of
+ {word_or_setting,_,#'Externaltypereference'{type=Token}} ->
+ match_syntax(S, T, As, Acc);
+ {Token,Line} when is_integer(Line) ->
+ match_syntax(S, T, As, Acc);
+ _ ->
+ {nomatch,Args}
+ end;
+match_syntax(S, [{field,Field}|T]=Fs, [A|As0]=Args0, Acc) ->
+ try match_syntax_type(S, Field, A) of
+ {match,Match} ->
+ match_syntax(S, T, As0, lists:reverse(Match)++Acc);
+ {params,_Name,#ptypedef{args=Params}=P,Ref} ->
+ {Args,As} = lists:split(length(Params), As0),
+ Val = match_syntax_params(S, P, Ref, Args),
+ match_syntax(S, Fs, [Val|As], Acc)
+ catch
+ _:_ ->
+ {nomatch,Args0}
+ end;
+match_syntax(S, [{optional,L}|T], As0, Acc) ->
+ case match_syntax(S, L, As0, []) of
+ {match,Match,As} ->
+ match_syntax(S, T, As, lists:reverse(Match)++Acc);
+ {nomatch,As0} ->
+ match_syntax(S, T, As0, Acc);
+ {nomatch,_}=NoMatch ->
+ NoMatch
+ end;
+match_syntax(_, [_|_], [], _Acc) ->
+ {nomatch,[]};
+match_syntax(_, [], As, Acc) ->
+ {match,Acc,As}.
+
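+%% match_syntax_type(S, ClassField, Setting) ->
+%%     {match,[{FieldName,CheckedValue}]} | {params,Name,PtDef,Ref}
+%%   Match a single class field against a setting from the object
+%%   definition. Throws nomatch if the setting does not fit the field;
+%%   {params,...} is returned when a parameterized type reference must
+%%   be instantiated first (handled by the caller, match_syntax/4).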
+match_syntax_type(S, Type, {value_tag,Val}) ->
+ match_syntax_type(S, Type, Val);
+match_syntax_type(S, Type, {setting,_,Val}) ->
+ match_syntax_type(S, Type, Val);
+match_syntax_type(S, Type, {word_or_setting,_,Val}) ->
+ match_syntax_type(S, Type, Val);
+match_syntax_type(_S, _Type, {Atom,Line})
+ when is_atom(Atom), is_integer(Line) ->
+ throw(nomatch);
+match_syntax_type(S, {fixedtypevaluefield,Name,#type{}=T,_,_}=Type,
+ #'Externalvaluereference'{}=ValRef0) ->
+ try get_referenced_type(S, ValRef0) of
+ {M,#valuedef{}=ValDef} ->
+ match_syntax_type(update_state(S, M), Type, ValDef)
+ catch
+ throw:{error,_} ->
+ ValRef = #valuedef{name=Name,
+ type=T,
+ value=ValRef0,
+ module=S#state.mname},
+ match_syntax_type(S, Type, ValRef)
+ end;
+match_syntax_type(S, {fixedtypevaluefield,Name,#type{},_,_}, #valuedef{}=Val0) ->
+ Val = check_value(S, Val0),
+ {match,[{Name,Val}]};
+match_syntax_type(S, {fixedtypevaluefield,Name,#type{},_,_},
+ {'ValueFromObject',{object,Object},FieldNames}) ->
+ Val = extract_field(S, Object, FieldNames),
+ {match,[{Name,Val}]};
+match_syntax_type(S, {fixedtypevaluefield,Name,#type{}=T,_,_}=Type, Any) ->
+ ValDef = #valuedef{name=Name,type=T,value=Any,module=S#state.mname},
+ match_syntax_type(S, Type, ValDef);
+match_syntax_type(_S, {fixedtypevaluesetfield,Name,#type{},_}, Any) ->
+ {match,[{Name,Any}]};
+match_syntax_type(S, {objectfield,Name,_,_,_}, #'Externalvaluereference'{}=Ref) ->
+ {M,Obj} = get_referenced_type(S, Ref),
+ check_object(S, Obj, object_to_check(S, Obj)),
+ {match,[{Name,Ref#'Externalvaluereference'{module=M}}]};
+match_syntax_type(S, {objectfield,Name,Class,_,_}, {object,_,_}=ObjDef) ->
+ InlinedObjName = list_to_atom(lists:concat([S#state.tname,
+ '_',Name])),
+ ObjSpec = #'Object'{classname=Class,def=ObjDef},
+ CheckedObj = check_object(S, #typedef{typespec=ObjSpec}, ObjSpec),
+ InlObj = #typedef{checked=true,name=InlinedObjName,typespec=CheckedObj},
+ ObjKey = {InlinedObjName, InlinedObjName},
+ insert_once(S, inlined_objects, ObjKey),
+ %% Which module to use here? Could it be other than top_module?
+ asn1_db:dbput(get(top_module), InlinedObjName, InlObj),
+ {match,[{Name,InlObj}]};
+match_syntax_type(_S, {objectfield,Name,_,_,_}, Any) ->
+ {match,[{Name,Any}]};
+match_syntax_type(S, {objectsetfield,Name,CDef0,_}, Any) ->
+ CDef = case CDef0 of
+ #type{def=CDef1} -> CDef1;
+ CDef1 -> CDef1
+ end,
+ case match_syntax_objset(S, Any, CDef) of
+ #typedef{typespec=#'ObjectSet'{}=Ts0}=Def ->
+ Ts = check_object(S, Def, Ts0),
+ {match,[{Name,Def#typedef{checked=true,typespec=Ts}}]};
+ _ ->
+ syntax_match_error(S, Any)
+ end;
+match_syntax_type(S, {typefield,Name0,_}, #type{def={pt,_,_}=Def}=Actual) ->
+ %% This is an inlined type. If constructed type, save in data base.
+ T = check_type(S, #typedef{typespec=Actual}, Actual),
+ #'Externaltypereference'{type=PtName} = element(2, Def),
+ NameList = [PtName,S#state.tname],
+ Name = list_to_atom(asn1ct_gen:list2name(NameList)),
+ NewTDef = #typedef{checked=true,name=Name,typespec=T},
+ asn1_db:dbput(S#state.mname, Name, NewTDef),
+ insert_once(S, parameterized_objects, {Name,type,NewTDef}),
+ {match,[{Name0,NewTDef}]};
+match_syntax_type(S, {typefield,Name,_}, #type{def=#'ObjectClassFieldType'{}}=Actual) ->
+ T = check_type(S, #typedef{typespec=Actual}, Actual),
+ {match,[{Name,ocft_def(T)}]};
+match_syntax_type(S, {typefield,Name,_}, #type{def=#'Externaltypereference'{}=Ref}) ->
+ match_syntax_external(S, Name, Ref);
+match_syntax_type(S, {typefield,Name,_}, #type{def=Def}=Actual) ->
+ T = check_type(S, #typedef{typespec=Actual}, Actual),
+ TypeName = asn1ct_gen:type(asn1ct_gen:get_inner(Def)),
+ {match,[{Name,#typedef{checked=true,name=TypeName,typespec=T}}]};
+match_syntax_type(S, {typefield,Name,_}, #'Externaltypereference'{}=Ref) ->
+ match_syntax_external(S, Name, Ref);
+match_syntax_type(_S, {variabletypevaluefield,Name,_,_}, Any) ->
+ {match,[{Name,Any}]};
+match_syntax_type(_S, {variabletypevaluesetfield,Name,_,_}, Any) ->
+ {match,[{Name,Any}]};
+match_syntax_type(_S, _Type, _Actual) ->
+ throw(nomatch).
+
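+%% match_syntax_params(S, PtDef, ERef, Args) -> #'Externaltypereference'{}
+%%   Instantiate a parameterized type that is used as a setting in the
+%%   object definition, store the resulting type in the database, and
+%%   return a reference to it.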
+match_syntax_params(S0, #ptypedef{name=Name}=PtDef,
+ #'Externaltypereference'{module=M,type=N}=ERef0, Args) ->
+ S = S0#state{mname=M,module=load_asn1_module(S0, M),tname=Name},
+ Type = check_type(S, PtDef, #type{def={pt,ERef0,Args}}),
+ ERefName = new_reference_name(N),
+ ERef = #'Externaltypereference'{type=ERefName,module=S0#state.mname},
+ TDef = #typedef{checked=true,name=ERefName,typespec=Type},
+ insert_once(S0, parameterized_objects, {ERefName,type,TDef}),
+ asn1_db:dbput(S0#state.mname, ERef#'Externaltypereference'.type, TDef),
+ ERef.
+
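+%% match_syntax_external(S, FieldName, Ref) ->
+%%     {match,[{FieldName,Ref'}]} | {params,FieldName,PtDef,Ref'}
+%%   Resolve an external type reference used as a type field setting.
+%%   Imported types that have not been checked yet are checked and
+%%   stored; references to parameterized types are returned as
+%%   {params,...} for later instantiation.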
+match_syntax_external(#state{mname=Mname}=S0, Name, Ref0) ->
+ {M,T0} = get_referenced_type(S0, Ref0),
+ Ref1 = Ref0#'Externaltypereference'{module=M},
+ case T0 of
+ #ptypedef{} ->
+ {params,Name,T0,Ref1};
+ #typedef{checked=false}=TDef0 when Mname =/= M ->
+ %% This typedef is an imported type (or maybe a set.asn
+ %% compilation).
+ S = S0#state{mname=M,module=load_asn1_module(S0, M),
+ tname=get_datastr_name(TDef0)},
+ Type = check_type(S, TDef0, TDef0#typedef.typespec),
+ TDef = TDef0#typedef{checked=true,typespec=Type},
+ asn1_db:dbput(M, get_datastr_name(TDef), TDef),
+ {match,[{Name,merged_name(S, Ref1)}]};
+ TDef ->
+ %% This might be a renamed type in a set of specs,
+ %% so rename the ref.
+ Type = asn1ct:get_name_of_def(TDef),
+ Ref = Ref1#'Externaltypereference'{type=Type},
+ {match,[{Name,Ref}]}
+ end.
+
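+%% match_syntax_objset(S, Setting, ClassDef) -> #typedef{} | none
+%%   Convert the various forms an object set setting can take into
+%%   either the referenced #typedef{} or a new #typedef{} wrapping an
+%%   #'ObjectSet'{}; none is returned if the setting cannot be
+%%   interpreted as an object set.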
+match_syntax_objset(_S, {element_set,_,_}=Set, ClassDef) ->
+ make_objset(ClassDef, Set);
+match_syntax_objset(S, #'Externaltypereference'{}=Ref, _) ->
+ {_,T} = get_referenced_type(S, Ref),
+ T;
+match_syntax_objset(S, #'Externalvaluereference'{}=Ref, _) ->
+ {_,T} = get_referenced_type(S, Ref),
+ T;
+match_syntax_objset(_, [_|_]=Set, ClassDef) ->
+ make_objset(ClassDef, Set);
+match_syntax_objset(S, {object,definedsyntax,Words}, ClassDef) ->
+ case Words of
+ [Word] ->
+ match_syntax_objset_1(S, Word, ClassDef);
+ [_|_] ->
+ %% More than one word does not make sense.
+ none
+ end;
+match_syntax_objset(S, #type{def=#'Externaltypereference'{}=Set}, ClassDef) ->
+ match_syntax_objset(S, Set, ClassDef);
+match_syntax_objset(_, #type{}, _) ->
+ none.
+
+match_syntax_objset_1(S, {setting,_,Set}, ClassDef) ->
+ %% Word that starts with an uppercase letter.
+ match_syntax_objset(S, Set, ClassDef);
+match_syntax_objset_1(S, {word_or_setting,_,Set}, ClassDef) ->
+ %% Word in uppercase/hyphens only.
+ match_syntax_objset(S, Set, ClassDef);
+match_syntax_objset_1(S, #type{def={'TypeFromObject', {object,Object}, FNs}},
+ ClassDef) ->
+ Set = extract_field(S, Object, FNs),
+ [_|_] = Set,
+ #typedef{checked=true,typespec=#'ObjectSet'{class=ClassDef,set=Set}};
+match_syntax_objset_1(_, #type{def=#'ObjectClassFieldType'{}}=Set, ClassDef) ->
+ make_objset(ClassDef, Set);
+match_syntax_objset_1(_, {object,_,_}=Object, ClassDef) ->
+ make_objset(ClassDef, [Object]).
+
+make_objset(ClassDef, Set) ->
+ #typedef{typespec=#'ObjectSet'{class=ClassDef,set=Set}}.
+
+syntax_match_error(S) ->
+ asn1_error(S, syntax_nomatch).
+
+syntax_match_error(S, What0) ->
+ What = printable_string(What0),
+ asn1_error(S, {syntax_nomatch,What}).
+
+printable_string(Def) ->
+ printable_string_1(Def).
+
+printable_string_1({word_or_setting,_,Def}) ->
+ printable_string_1(Def);
+printable_string_1({value_tag,V}) ->
+ printable_string_1(V);
+printable_string_1({#seqtag{val=Val1},Val2}) ->
+ atom_to_list(Val1) ++ " " ++ printable_string_1(Val2);
+printable_string_1(#type{def=Def}) ->
+ atom_to_list(asn1ct_gen:get_inner(Def));
+printable_string_1(#'Externaltypereference'{type=Type}) ->
+ atom_to_list(Type);
+printable_string_1(#'Externalvaluereference'{value=Type}) ->
+ atom_to_list(Type);
+printable_string_1({Atom,Line}) when is_atom(Atom), is_integer(Line) ->
+ q(Atom);
+printable_string_1({object,definedsyntax,L}) ->
+ q(string:join([printable_string_1(Item) || Item <- L], " "));
+printable_string_1([_|_]=Def) ->
+ case lists:all(fun is_integer/1, Def) of
+ true ->
+ lists:flatten(io_lib:format("~p", [Def]));
+ false ->
+ q(string:join([printable_string_1(Item) || Item <- Def], " "))
+ end;
+printable_string_1(Def) ->
+ lists:flatten(io_lib:format("~p", [Def])).
+
+q(S) ->
+ lists:concat(["\"",S,"\""]).
+
check_defaultfields(S, Fields, ClassFields) ->
Present = ordsets:from_list([F || {F,_} <- Fields]),
Mandatory0 = get_mandatory_class_fields(ClassFields),
Mandatory = ordsets:from_list(Mandatory0),
All = ordsets:from_list([element(2, F) || F <- ClassFields]),
- #state{type=T,tname=Obj} = S,
+ #state{tname=Obj} = S,
case ordsets:subtract(Present, All) of
[] ->
ok;
[_|_]=Invalid ->
- asn1_error(S, T, {invalid_fields,Invalid,Obj})
+ asn1_error(S, {invalid_fields,Invalid,Obj})
end,
case ordsets:subtract(Mandatory, Present) of
[] ->
check_defaultfields_1(S, Fields, ClassFields, []);
[_|_]=Missing ->
- asn1_error(S, T, {missing_mandatory_fields,Missing,Obj})
+ asn1_error(S, {missing_mandatory_fields,Missing,Obj})
end.
check_defaultfields_1(_S, [], _ClassFields, Acc) ->
{object,defaultsyntax,lists:reverse(Acc)};
check_defaultfields_1(S, [{FName,Spec}|Fields], ClassFields, Acc) ->
CField = lists:keyfind(FName, 2, ClassFields),
- {NewField,RestFields} =
- convert_to_defaultfield(S, FName, [Spec|Fields], CField),
- check_defaultfields_1(S, RestFields, ClassFields, [NewField|Acc]).
+ {match,Match} = match_syntax_type(S, CField, Spec),
+ check_defaultfields_1(S, Fields, ClassFields, Match++Acc).
-convert_definedsyntax(_S,[],[],_ClassFields,Acc) ->
- lists:reverse(Acc);
-convert_definedsyntax(S,Fields,WithSyntax,ClassFields,Acc) ->
- {MatchedField,RestFields,RestWS} =
- match_field(S,Fields,WithSyntax,ClassFields),
- if
- is_list(MatchedField) ->
- convert_definedsyntax(S,RestFields,RestWS,ClassFields,
- lists:append(MatchedField,Acc));
- true ->
- convert_definedsyntax(S,RestFields,RestWS,ClassFields,
- [MatchedField|Acc])
- end.
+get_mandatory_class_fields(ClassFields) ->
+ [element(2, F) || F <- ClassFields,
+ is_mandatory_class_field(F)].
-get_mandatory_class_fields([{fixedtypevaluefield,Name,_,_,'MANDATORY'}|T]) ->
- [Name|get_mandatory_class_fields(T)];
-get_mandatory_class_fields([{objectfield,Name,_,_,'MANDATORY'}|T]) ->
- [Name|get_mandatory_class_fields(T)];
-get_mandatory_class_fields([{objectsetfield,Name,_,'MANDATORY'}|T]) ->
- [Name|get_mandatory_class_fields(T)];
-get_mandatory_class_fields([{typefield,Name,'MANDATORY'}|T]) ->
- [Name|get_mandatory_class_fields(T)];
-get_mandatory_class_fields([{variabletypevaluefield,Name,_,'MANDATORY'}|T]) ->
- [Name|get_mandatory_class_fields(T)];
-get_mandatory_class_fields([{variabletypevaluesetfield,
- Name,_,'MANDATORY'}|T]) ->
- [Name|get_mandatory_class_fields(T)];
-get_mandatory_class_fields([_|T]) ->
- get_mandatory_class_fields(T);
-get_mandatory_class_fields([]) -> [].
-
-match_field(S,Fields,WithSyntax,ClassFields) ->
- match_field(S,Fields,WithSyntax,ClassFields,[]).
-
-match_field(S,Fields,[W|Ws],ClassFields,Acc) when is_list(W) ->
- case catch(match_optional_field(S,Fields,W,ClassFields,[])) of
- {'EXIT',_} ->
- match_field(Fields,Ws,ClassFields,Acc); %% add S
-%% {[Result],RestFields} ->
-%% {Result,RestFields,Ws};
- {Result,RestFields} when is_list(Result) ->
- {Result,RestFields,Ws};
- _ ->
- match_field(S,Fields,Ws,ClassFields,Acc)
- end;
-match_field(S,Fields,WithSyntax,ClassFields,_Acc) ->
- match_mandatory_field(S,Fields,WithSyntax,ClassFields,[]).
-
-match_optional_field(_S,RestFields,[],_,Ret) ->
- {Ret,RestFields};
-%% An additional optional field within an optional field
-match_optional_field(S,Fields,[W|Ws],ClassFields,Ret) when is_list(W) ->
- case catch match_optional_field(S,Fields,W,ClassFields,[]) of
- {'EXIT',_} when length(Ws) > 0 ->
- match_optional_field(S,Fields,Ws,ClassFields,Ret);
- {'EXIT',_} ->
- {Ret,Fields};
- {asn1,{optional_matcherror,_,_}} when length(Ws) > 0 ->
- match_optional_field(S,Fields,Ws,ClassFields,Ret);
- {asn1,{optional_matcherror,_,_}} ->
- {Ret,Fields};
- {OptionalField,RestFields} ->
- match_optional_field(S,RestFields,Ws,ClassFields,
- lists:append(OptionalField,Ret))
- end;
-%% identify and skip word
-match_optional_field(S,[{_,_,#'Externaltypereference'{type=WorS}}|Rest],
- [WorS|Ws],ClassFields,Ret) ->
- match_optional_field(S,Rest,Ws,ClassFields,Ret);
-match_optional_field(S,[],_,ClassFields,Ret) ->
- match_optional_field(S,[],[],ClassFields,Ret);
-%% identify and skip comma
-match_optional_field(S,[{WorS,_}|Rest],[{WorS,_}|Ws],ClassFields,Ret) ->
- match_optional_field(S,Rest,Ws,ClassFields,Ret);
-%% am optional setting inside another optional setting may be "double-listed"
-match_optional_field(S,[Setting],DefinedSyntax,ClassFields,Ret)
- when is_list(Setting) ->
- match_optional_field(S,Setting,DefinedSyntax,ClassFields,Ret);
-%% identify and save field data
-match_optional_field(S,[Setting|Rest],[{_,W}|Ws],ClassFields,Ret) ->
- ?dbg("matching optional field setting: ~p with user friendly syntax: ~p~n",[Setting,W]),
- WorS =
- case Setting of
- Type when is_record(Type,type) -> Type;
- {'ValueFromObject',_,_} -> Setting;
- {object,_,_} -> Setting;
- {_,_,WordOrSetting} -> WordOrSetting;
- Other -> Other
- end,
- case lists:keysearch(W,2,ClassFields) of
- false ->
- throw({asn1,{optional_matcherror,WorS,W}});
- {value,CField} ->
- {NewField,RestFields} =
- convert_to_defaultfield(S,W,[WorS|Rest],CField),
- match_optional_field(S,RestFields,Ws,ClassFields,[NewField|Ret])
- end;
-match_optional_field(_S,[WorS|_Rest],[W|_Ws],_ClassFields,_Ret) ->
- throw({asn1,{optional_matcherror,WorS,W}}).
-
-match_mandatory_field(_S,[],[],_,[Acc]) ->
- {Acc,[],[]};
-match_mandatory_field(_S,[],[],_,Acc) ->
- {Acc,[],[]};
-match_mandatory_field(S,[],[H|T],CF,Acc) when is_list(H) ->
- match_mandatory_field(S,[],T,CF,Acc);
-match_mandatory_field(_S,[],WithSyntax,_,_Acc) ->
- throw({asn1,{mandatory_matcherror,[],WithSyntax}});
-%match_mandatory_field(_S,Fields,WithSyntax=[W|_Ws],_ClassFields,[Acc]) when is_list(W) ->
-match_mandatory_field(_S,Fields,WithSyntax=[W|_Ws],_ClassFields,Acc) when is_list(W), length(Acc) >= 1 ->
- {Acc,Fields,WithSyntax};
-%% identify and skip word
-%%match_mandatory_field(S,[{_,_,WorS}|Rest],
-match_mandatory_field(S,[{_,_,#'Externaltypereference'{type=WorS}}|Rest],
- [WorS|Ws],ClassFields,Acc) ->
- match_mandatory_field(S,Rest,Ws,ClassFields,Acc);
-%% identify and skip comma
-match_mandatory_field(S,[{WorS,_}|Rest],[{WorS,_}|Ws],ClassFields,Ret) ->
- match_mandatory_field(S,Rest,Ws,ClassFields,Ret);
-%% identify and save field data
-match_mandatory_field(S,[Setting|Rest],[{_,W}|Ws],ClassFields,Acc) ->
- ?dbg("matching field setting: ~p with user friendly syntax: ~p~n",[Setting,W]),
- WorS =
- case Setting of
- {object,_,_} -> Setting;
- {_,_,WordOrSetting} -> WordOrSetting;
- Type when is_record(Type,type) -> Type;
- Other -> Other
- end,
- case lists:keysearch(W,2,ClassFields) of
- false ->
- throw({asn1,{mandatory_matcherror,WorS,W}});
- {value,CField} ->
- {NewField,RestFields} =
- convert_to_defaultfield(S,W,[WorS|Rest],CField),
- match_mandatory_field(S,RestFields,Ws,ClassFields,[NewField|Acc])
- end;
-
-match_mandatory_field(_S,[WorS|_Rest],[W|_Ws],_ClassFields,_Acc) ->
- throw({asn1,{mandatory_matcherror,WorS,W}}).
-
-%% Converts a field of an object from defined syntax to default syntax
-%% A field may be a type, a fixed type value, an object, an objectset,
-%%
-convert_to_defaultfield(S,ObjFieldName,[OFS|RestOFS],CField)->
- ?dbg("convert field: ~p of type: ~p~n",[ObjFieldName,element(1,CField)]),
- CurrMod = S#state.mname,
- Strip_value_tag =
- fun({value_tag,ValueSetting}) -> ValueSetting;
- (VS) -> VS
- end,
- ObjFieldSetting = Strip_value_tag(OFS),
- RestSettings = [Strip_value_tag(X)||X <- RestOFS],
- case element(1,CField) of
- typefield ->
- TypeDef=
- case ObjFieldSetting of
- TypeRec when is_record(TypeRec,type) -> TypeRec#type.def;
- TDef when is_record(TDef,typedef) ->
- TDef#typedef{checked=true,
- typespec=check_type(S,TDef,
- TDef#typedef.typespec)};
- _ -> ObjFieldSetting
- end,
- {Type,SettingsLeft} =
- if
- is_record(TypeDef,typedef) -> {TypeDef,RestSettings};
- is_record(TypeDef,'ObjectClassFieldType') ->
- T=check_type(S,#typedef{typespec=ObjFieldSetting},ObjFieldSetting),
- {oCFT_def(S,T),RestSettings};
-% #typedef{checked=true,name=Name,typespec=IT};
- is_tuple(TypeDef), element(1,TypeDef) == pt ->
- %% this is an inlined type. If constructed
- %% type save in data base
- T=check_type(S,#typedef{typespec=ObjFieldSetting},ObjFieldSetting),
- #'Externaltypereference'{type=PtName} =
- element(2,TypeDef),
- NameList = [PtName,S#state.tname],
- NewName = list_to_atom(asn1ct_gen:list2name(NameList)),
- NewTDef=#typedef{checked=true,name=NewName,
- typespec=T},
- asn1_db:dbput(S#state.mname,NewName,NewTDef),
- %%asn1ct_gen:insert_once(parameterized_objects,{NewName,type,NewTDef}),
- insert_once(S,parameterized_objects,
- {NewName,type,NewTDef}),
- {NewTDef,RestSettings};
- is_tuple(TypeDef), element(1,TypeDef)=='SelectionType' ->
- T=check_type(S,#typedef{typespec=ObjFieldSetting},
- ObjFieldSetting),
- Name = type_name(S,T),
- {#typedef{checked=true,name=Name,typespec=T},RestSettings};
- true ->
- case asn1ct_gen:type(asn1ct_gen:get_inner(TypeDef)) of
- ERef = #'Externaltypereference'{module=CurrMod} ->
- {RefMod,T} = get_referenced_type(S,ERef),
- check_and_save(S,ERef#'Externaltypereference'{module=RefMod},T,RestSettings);
-
- ERef = #'Externaltypereference'{} ->
- {RefMod,T} = get_referenced_type(S,ERef),
- check_and_save(S,ERef#'Externaltypereference'{module=RefMod},T,RestSettings);
- Bif when Bif=={primitive,bif};Bif=={constructed,bif} ->
- T = check_type(S,#typedef{typespec=ObjFieldSetting},
- ObjFieldSetting),
- {#typedef{checked=true,name=Bif,typespec=T},RestSettings};
- _ ->
- %this case should not happen any more
- {Mod,T} =
- get_referenced_type(S,#'Externaltypereference'{module=S#state.mname,type=ObjFieldSetting}),
- case Mod of
- CurrMod ->
- {T,RestSettings};
- ExtMod ->
- #typedef{name=Name} = T,
- {T#typedef{name={ExtMod,Name}},RestSettings}
- end
- end
- end,
- {{ObjFieldName,Type},SettingsLeft};
- fixedtypevaluefield ->
- case ObjFieldName of
- Val when is_atom(Val) ->
- %% ObjFieldSetting can be a value,an objectidentifiervalue,
- %% an element in an enumeration or namednumberlist etc.
- ValRef =
- case ObjFieldSetting of
- ValSetting=#'Externalvaluereference'{} ->
- ValSetting;
- {'ValueFromObject',{_,ObjRef},FieldName} ->
- {_,Object} = get_referenced_type(S,ObjRef),
- ChObject = check_object(S,Object,
- Object#typedef.typespec),
- get_fieldname_element(S,Object#typedef{typespec=ChObject},
- FieldName);
- ValSetting = #valuedef{} ->
- ValSetting;
- ValSetting ->
- #valuedef{type=element(3,CField),
- value=ValSetting,
- module=S#state.mname}
- end,
- ?dbg("fixedtypevaluefield ValRef: ~p~n",[ValRef]),
- case ValRef of
- #valuedef{} ->
- {{ObjFieldName,check_value(S,ValRef)},RestSettings};
- _ ->
- ValDef =
- case catch get_referenced_type(S,ValRef) of
- {error,_} ->
- NewValDef =
- #valuedef{name=Val,
- type=element(3,CField),
- value=ObjFieldSetting,
- module=S#state.mname},
- check_value(S,NewValDef);
- {M,VDef} when is_record(VDef,valuedef) ->
- check_value(update_state(S,M),
- %%S#state{mname=M},
- VDef);%% XXX
- {M,VDef} ->
- check_value(update_state(S,M),
- %%S#state{mname=M},
- #valuedef{name=Val,
- type=element(3,CField),
- value=VDef,
- module=M})
- end,
- {{ObjFieldName,ValDef},RestSettings}
- end;
- Val ->
- {{ObjFieldName,Val},RestSettings}
- end;
- fixedtypevaluesetfield ->
- {{ObjFieldName,ObjFieldSetting},RestSettings};
- objectfield ->
- CheckObject =
- fun(O) ->
- O#typedef{checked=true,typespec=
- check_object(S,O,O#typedef.typespec)}
- end,
- ObjectSpec =
- case ObjFieldSetting of
- Ref when is_record(Ref,'Externalvaluereference') ->
- %% The object O might be a #valuedef{} if
- %% e.g. the definition looks like
- %% myobj SOMECLASS ::= referencedObject
- {M,O} = get_referenced_type(S,Ref),
- check_object(S,O,object_to_check(O)),
- Ref#'Externalvaluereference'{module=M};
-
- {'ValueFromObject',{_,ObjRef},FieldName} ->
- %% This is an ObjectFromObject
- {_,Object} = get_referenced_type(S,ObjRef),
- ChObject = check_object(S,Object,
- Object#typedef.typespec),
- ObjFromObj=
- get_fieldname_element(S,Object#typedef{
- typespec=ChObject},
- FieldName),
- CheckObject(ObjFromObj);
- ObjDef={object,_,_} ->
- %% An object defined inlined in another object
- %% class is an objectfield, that implies that
- %% {objectsetfield,TypeFieldName,DefinedObjecClass,
- %% OptionalitySpec}
- %% DefinedObjecClass = #'Externaltypereference'{}|
- %% 'TYPE-IDENTIFIER' | 'ABSTRACT-SYNTAX'
- ClassName = element(3,CField),
- InlinedObjName=
- list_to_atom(lists:concat([S#state.tname]++
- ['_',ObjFieldName])),
-
- ObjSpec = #'Object'{classname=ClassName,
- def=ObjDef},
- CheckedObj=
- check_object(S,#typedef{typespec=ObjSpec},ObjSpec),
- InlObj = #typedef{checked=true,name=InlinedObjName,
- typespec=CheckedObj},
- ObjKey = {InlinedObjName,InlinedObjName},
- %% asn1ct_gen:insert_once(inlined_objects,ObjKey),
- insert_once(S,inlined_objects,ObjKey),
- %% Which module to use here? Could it be other than top_module ?
- %% asn1_db:dbput(S#state.mname,InlinedObjName,InlObj),
- asn1_db:dbput(get(top_module),InlinedObjName,InlObj),
- InlObj;
- #type{def=Eref} when is_record(Eref,'Externaltypereference') ->
- {_,O} = get_referenced_type(S,Eref),
- CheckObject(O);
- Other ->
- {_,O} = get_referenced_type(S,#'Externaltypereference'{module=S#state.mname,type=Other}),
- CheckObject(O)
- end,
- {{ObjFieldName,ObjectSpec},RestSettings};
- variabletypevaluefield ->
- {{ObjFieldName,ObjFieldSetting},RestSettings};
- variabletypevaluesetfield ->
- {{ObjFieldName,ObjFieldSetting},RestSettings};
-%% objectset_or_fixedtypevalueset_field ->
-%% ok;
- objectsetfield ->
- ObjSetSpec = get_objectset_def(S,ObjFieldSetting,CField),
- ?dbg("objectsetfield, ObjSetSpec:~p~n",[ObjSetSpec]),
- {{ObjFieldName,
- ObjSetSpec#typedef{checked=true,
- typespec=check_object(S,ObjSetSpec,
- ObjSetSpec#typedef.typespec)}},RestSettings}
- end.
-
-get_objectset_def(S,Ref,CField)
- when is_record(Ref,'Externaltypereference');
- is_record(Ref,'Externalvaluereference') ->
- {_M,T}=get_referenced_type(S,Ref),
- get_objectset_def2(S,T,CField);
-get_objectset_def(S,ObjectList,CField) when is_list(ObjectList) ->
- %% an objctset defined in the object,though maybe
- %% parsed as a SequenceOfValue
- %% The ObjectList may be a list of references to
- %% objects, a ValueFromObject
- ?dbg("objectsetfield: ~p~n",[CField]),
- get_objectset_def2(S,ObjectList,CField);
-get_objectset_def(S,'EXTENSIONMARK',CField) ->
- ?dbg("objectsetfield: ~p~n",[CField]),
- get_objectset_def2(S,['EXTENSIONMARK'],CField);
-get_objectset_def(_S,ObjFieldSetting={'SingleValue',_},CField) ->
- %% a Union of defined objects
- ?dbg("objectsetfield, SingleValue~n",[]),
- union_of_defed_objs(CField,ObjFieldSetting);
-get_objectset_def(_S,ObjFieldSetting={{'SingleValue',_},_},CField) ->
- %% a Union of defined objects
- ?dbg("objectsetfield, SingleValue~n",[]),
- union_of_defed_objs(CField,ObjFieldSetting);
-get_objectset_def(S,{object,_,[#type{def={'TypeFromObject',
- {object,RefedObj},
- FieldName}}]},_CField) ->
- %% This case occurs when an ObjectSetFromObjects
- %% production is used
- {_M,Def} = get_referenced_type(S,RefedObj),
- get_fieldname_element(S,Def,FieldName);
-get_objectset_def(S,{object,_,[{setting,_,ERef}]},CField)
- when is_record(ERef,'Externaltypereference') ->
- {_,T} = get_referenced_type(S,ERef),
- get_objectset_def2(S,T,CField);
-get_objectset_def(S,#type{def=ERef},_CField)
- when is_record(ERef,'Externaltypereference') ->
- {_,T} = get_referenced_type(S,ERef),
- T;
-get_objectset_def(S,ObjFieldSetting,CField)
- when is_atom(ObjFieldSetting) ->
- ERef = #'Externaltypereference'{module=S#state.mname,
- type=ObjFieldSetting},
- {_,T} = get_referenced_type(S,ERef),
- get_objectset_def2(S,T,CField).
-
-get_objectset_def2(_S,T = #typedef{typespec=#'Object'{}},_CField) ->
- #typedef{typespec=#'Object'{classname=Class,def=Def}} = T,
- T#typedef{typespec=#'ObjectSet'{class=Class,set=[Def]}};
-get_objectset_def2(_S,Set,CField) when is_list(Set) ->
- {_,_,Type,_} = CField,
- ClassDef = Type#type.def,
- #typedef{typespec=#'ObjectSet'{class=ClassDef,
- set=Set}};
-get_objectset_def2(_S,T = #typedef{typespec=#'ObjectSet'{}},_CField) ->
- T;
-get_objectset_def2(S,T,_CField) ->
- asn1ct:warning("get_objectset_def2: uncontrolled object set structure:~n~p~n",
- [T],S,"get_objectset_def2: uncontrolled object set structure").
-
-type_name(S,#type{def=Def}) ->
- CurrMod = S#state.mname,
- case asn1ct_gen:type(asn1ct_gen:get_inner(Def)) of
- #'Externaltypereference'{module=CurrMod,type=Name} ->
- Name;
- #'Externaltypereference'{module=Mod,type=Name} ->
- {Mod,Name};
- Bif when Bif=={primitive,bif};Bif=={constructed,bif} ->
- Bif
- end.
+is_mandatory_class_field({fixedtypevaluefield,_,_,_,'MANDATORY'}) ->
+ true;
+is_mandatory_class_field({objectfield,_,_,_,'MANDATORY'}) ->
+ true;
+is_mandatory_class_field({objectsetfield,_,_,'MANDATORY'}) ->
+ true;
+is_mandatory_class_field({typefield,_,'MANDATORY'}) ->
+ true;
+is_mandatory_class_field({variabletypevaluefield,_,_,'MANDATORY'}) ->
+ true;
+is_mandatory_class_field({variabletypevaluesetfield,_,_,'MANDATORY'}) ->
+ true;
+is_mandatory_class_field(_) ->
+ false.
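
%% For reference (made-up example, not from the patch): given class fields
%% such as
%%   [{typefield,'Type','MANDATORY'},
%%    {typefield,'Extra','OPTIONAL'},
%%    {fixedtypevaluefield,id,Type,Unique,'MANDATORY'}]
%% get_mandatory_class_fields/1 above returns ['Type',id].
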
merged_name(#state{inputmodules=[]},ERef) ->
ERef;
@@ -2013,38 +1649,18 @@ merged_name(S,ERef=#'Externaltypereference'{module=M}) ->
ERef
end.
-oCFT_def(S,T) ->
- case get_OCFT_inner(S,T) of
- ERef=#'Externaltypereference'{} -> ERef;
- {Name,Type} -> #typedef{checked=true,name=Name,typespec=Type};
- 'ASN1_OPEN_TYPE' ->
- #typedef{checked=true,typespec=T#type{def='ASN1_OPEN_TYPE'}}
- end.
-
-get_OCFT_inner(_S,T) ->
-% Module=S#state.mname,
- Def = T#type.def,
- case Def#'ObjectClassFieldType'.type of
+ocft_def(#type{def=#'ObjectClassFieldType'{type=OCFT}}=T) ->
+ case OCFT of
{fixedtypevaluefield,_,InnerType} ->
case asn1ct_gen:type(asn1ct_gen:get_inner(InnerType#type.def)) of
- Bif when Bif=={primitive,bif};Bif=={constructed,bif} ->
- {Bif,InnerType};
- ERef = #'Externaltypereference'{} ->
- ERef
+ Bif when Bif =:= {primitive,bif}; Bif =:= {constructed,bif} ->
+ #typedef{checked=true,name=Bif,typespec=InnerType};
+ #'Externaltypereference'{}=Ref ->
+ Ref
end;
- 'ASN1_OPEN_TYPE' -> 'ASN1_OPEN_TYPE'
+ 'ASN1_OPEN_TYPE' ->
+ #typedef{checked=true,typespec=T#type{def='ASN1_OPEN_TYPE'}}
end.
-
-
-
-union_of_defed_objs({_,_,_ObjClass=#type{def=ClassDef},_},ObjFieldSetting) ->
- #typedef{typespec=#'ObjectSet'{class = ClassDef,
- set = ObjFieldSetting}};
-union_of_defed_objs({_,_,DefObjClassRef,_},ObjFieldSetting)
- when is_record(DefObjClassRef,'Externaltypereference') ->
- #typedef{typespec=#'ObjectSet'{class = DefObjClassRef,
- set = ObjFieldSetting}}.
-
check_value(OldS,V) when is_record(V,pvaluesetdef) ->
#pvaluesetdef{checked=Checked,type=Type} = V,
@@ -2068,8 +1684,7 @@ check_value(OldS,V) when is_record(V,typedef) ->
#typedef{typespec=TS} = V,
case TS of
#'ObjectSet'{class=ClassRef} ->
- {RefM,TSDef} = get_referenced_type(OldS,ClassRef),
- %%IsObjectSet(TSDef);
+ {_RefM,TSDef} = get_referenced_type(OldS, ClassRef),
case TSDef of
#classdef{} -> throw({objectsetdef});
#typedef{typespec=#type{def=Eref}} when
@@ -2077,14 +1692,12 @@ check_value(OldS,V) when is_record(V,typedef) ->
%% This case if the class reference is a defined
%% reference to class
check_value(OldS,V#typedef{typespec=TS#'ObjectSet'{class=Eref}});
- #typedef{} ->
+ #typedef{typespec=HostType} ->
% an ordinary value set with a type in #typedef.typespec
- ValueSet = TS#'ObjectSet'.set,
- Type=check_type(OldS,TSDef,TSDef#typedef.typespec),
- Value = check_value(OldS,#valuedef{type=Type,
- value=ValueSet,
- module=RefM}),
- {valueset,Type#type{constraint=Value#valuedef.value}}
+ ValueSet0 = TS#'ObjectSet'.set,
+ Constr = check_constraints(OldS, HostType, [ValueSet0]),
+ Type = check_type(OldS,TSDef,TSDef#typedef.typespec),
+ {valueset,Type#type{constraint=Constr}}
end;
_ ->
throw({objectsetdef})
@@ -2104,11 +1717,11 @@ check_value(S, #valuedef{}=V) ->
end.
check_valuedef(#state{recordtopname=TopName}=S0, V0) ->
- #valuedef{name=Name,type=Vtype,value=Value,module=ModName} = V0,
+ #valuedef{name=Name,type=Vtype0,value=Value,module=ModName} = V0,
V = V0#valuedef{checked=true},
+ Vtype = check_type(S0, #typedef{name=Name,typespec=Vtype0},Vtype0),
Def = Vtype#type.def,
- Constr = Vtype#type.constraint,
- S1 = S0#state{type=Vtype,tname=Def,value=V0,vname=Name},
+ S1 = S0#state{tname=Def},
SVal = update_state(S1, ModName),
case Def of
#'Externaltypereference'{type=RecName}=Ext ->
@@ -2116,9 +1729,8 @@ check_valuedef(#state{recordtopname=TopName}=S0, V0) ->
%% If V isn't a value but an object Type is a #classdef{}
S2 = update_state(S1, RefM),
case Type of
- #classdef{} ->
- throw({objectdef});
- #typedef{typespec=TypeSpec} ->
+ #typedef{typespec=TypeSpec0}=TypeDef ->
+ TypeSpec = check_type(S2, TypeDef, TypeSpec0),
S3 = case is_contextswitchtype(Type) of
true ->
S2;
@@ -2135,7 +1747,7 @@ check_valuedef(#state{recordtopname=TopName}=S0, V0) ->
V#valuedef{type=Type}),
V#valuedef{value=CheckedVal}
end;
- 'ANY' ->
+ 'ASN1_OPEN_TYPE' ->
{opentypefieldvalue,ANYType,ANYValue} = Value,
CheckedV = check_value(SVal,#valuedef{name=Name,
type=ANYType,
@@ -2143,19 +1755,12 @@ check_valuedef(#state{recordtopname=TopName}=S0, V0) ->
module=ModName}),
V#valuedef{value=CheckedV#valuedef.value};
'INTEGER' ->
- ok = validate_integer(SVal, Value, [], Constr),
V#valuedef{value=normalize_value(SVal, Vtype, Value, [])};
- {'INTEGER',NamedNumberList} ->
- ok = validate_integer(SVal, Value, NamedNumberList, Constr),
+ {'INTEGER',_NamedNumberList} ->
V#valuedef{value=normalize_value(SVal, Vtype, Value, [])};
#'SEQUENCE'{} ->
- {ok,SeqVal} = convert_external(SVal, Value),
+ {ok,SeqVal} = convert_external(SVal, Vtype, Value),
V#valuedef{value=normalize_value(SVal, Vtype, SeqVal, TopName)};
- {'SelectionType',SelName,SelT} ->
- CheckedT = check_selectiontype(SVal, SelName, SelT),
- NewV = V#valuedef{type=CheckedT},
- SelVDef = check_value(S1#state{value=NewV}, NewV),
- V#valuedef{value=SelVDef#valuedef.value};
_ ->
V#valuedef{value=normalize_value(SVal, Vtype, Value, TopName)}
end.
@@ -2169,179 +1774,97 @@ is_contextswitchtype(#typedef{name='CHARACTER STRING'}) ->
is_contextswitchtype(_) ->
false.
-% validate_integer(S,{identifier,Pos,Id},NamedNumberList,Constr) ->
-% case lists:keysearch(Id,1,NamedNumberList) of
-% {value,_} -> ok;
-% false -> error({value,"unknown NamedNumber",S})
-% end;
-%% This case occurs when there is a valuereference
-%% validate_integer(S=#state{mname=M},
-%% #'Externalvaluereference'{module=M,value=Id}=Ref,
-validate_integer(S,#'Externalvaluereference'{value=Id}=Ref,
- NamedNumberList,Constr) ->
- case lists:keysearch(Id,1,NamedNumberList) of
- {value,_} -> ok;
- false -> validate_integer_ref(S,Ref,NamedNumberList,Constr)
- %%error({value,"unknown NamedNumber",S})
- end;
-validate_integer(S,Id,NamedNumberList,Constr) when is_atom(Id) ->
- case lists:keysearch(Id,1,NamedNumberList) of
- {value,_} -> ok;
- false -> validate_integer_ref(S,Id,NamedNumberList,Constr)
- %error({value,"unknown NamedNumber",S})
+%%%
+%%% Start of OBJECT IDENTIFIER/RELATIVE-OID validation.
+%%%
+
+validate_objectidentifier(S, OidType, #'Externalvaluereference'{}=Id) ->
+ %% Must be an OBJECT IDENTIFIER or RELATIVE-OID depending on OidType.
+ get_oid_value(S, OidType, false, Id);
+validate_objectidentifier(S, OidType, {'ValueFromObject',{object,Obj},Fields}) ->
+ %% Must be an OBJECT IDENTIFIER/RELATIVE-OID depending on OidType.
+ case extract_field(S, Obj, Fields) of
+ #valuedef{checked=true,value=Value,type=Type} when is_tuple(Value) ->
+ _ = get_oid_type(S, OidType, Type),
+ Value;
+ _ ->
+ asn1_error(S, {illegal_oid,OidType})
end;
-validate_integer(_S,Value,_NamedNumberList,Constr) when is_integer(Value) ->
- check_integer_range(Value,Constr).
-
-validate_integer_ref(S,Id,_,_) when is_atom(Id) ->
- error({value,"unknown integer referens",S});
-validate_integer_ref(S,Ref,NamedNumberList,Constr) ->
- case get_referenced_type(S,Ref) of
- {M,V} when is_record(V,valuedef) ->
- NewS = update_state(S,M),
- case check_value(NewS,V) of
- #valuedef{type=#type{def='INTEGER'},value=Value} ->
- validate_integer(NewS,Value,NamedNumberList,Constr);
- _Err -> error({value,"unknown integer referens",S})
+validate_objectidentifier(S, OidType,
+ [{#seqtag{module=Mod,pos=Pos,val=Atom},Val}]) ->
+ %% This case is when an OBJECT IDENTIFIER value has been parsed as a
+ %% SEQUENCE value.
+ Rec = #'Externalvaluereference'{pos=Pos,
+ module=Mod,
+ value=Atom},
+ validate_oid(S, OidType, [Rec,Val], []);
+validate_objectidentifier(S, OidType, [_|_]=L0) ->
+ validate_oid(S, OidType, L0, []);
+validate_objectidentifier(S, OidType, _) ->
+ asn1_error(S, {illegal_oid,OidType}).
+
+get_oid_value(S, OidType, AllowInteger, #'Externalvaluereference'{}=Id) ->
+ case get_referenced_type(S, Id) of
+ {_,#valuedef{checked=Checked,type=Type,value=V}} ->
+ case get_oid_type(S, OidType, Type) of
+ 'INTEGER' when not AllowInteger ->
+ asn1_error(S, {illegal_oid,OidType});
+ _ when Checked ->
+ V;
+ 'INTEGER' ->
+ V;
+ _ ->
+ validate_objectidentifier(S, OidType, V)
end;
_ ->
- error({value,"unknown integer referens",S})
+ asn1_error(S, {illegal_oid,OidType})
end.
-
-
-
-check_integer_range(_Int, Constr) when is_list(Constr) ->
- ok.
-%%------------
-%% This can be removed when the old parser is removed
-%% The function removes 'space' atoms from the list
-
-is_space_list([H],Acc) ->
- lists:reverse([H|Acc]);
-is_space_list([H,space|T],Acc) ->
- is_space_list(T,[H|Acc]);
-is_space_list([],Acc) ->
- lists:reverse(Acc);
-is_space_list([H|T],Acc) ->
- is_space_list(T,[H|Acc]).
-
-validate_objectidentifier(S,OID,ERef,C)
- when is_record(ERef,'Externalvaluereference') ->
- validate_objectidentifier(S,OID,[ERef],C);
-validate_objectidentifier(S,OID,Tup,C) when is_tuple(Tup) ->
- validate_objectidentifier(S,OID,tuple_to_list(Tup),C);
-validate_objectidentifier(S,OID,L,_) ->
- NewL = is_space_list(L,[]),
- case validate_objectidentifier1(S,OID,NewL) of
- NewL2 when is_list(NewL2) ->{ok,list_to_tuple(NewL2)};
- Other -> {ok,Other}
- end.
-
-validate_objectidentifier1(S, OID, [Id|T])
- when is_record(Id,'Externalvaluereference') ->
- case catch get_referenced_type(S,Id) of
- {M,V} when is_record(V,valuedef) ->
- NewS = update_state(S,M),
- case check_value(NewS,V) of
- #valuedef{type=#type{def=ERef},checked=true,
- value=Value} when is_tuple(Value) ->
- case is_object_id(OID,NewS,ERef) of
- true ->
- %% T must be a RELATIVE-OID
- validate_oid(true,NewS, rel_oid, T, lists:reverse(tuple_to_list(Value)));
- _ ->
- error({value, {"illegal "++to_string(OID),[Id|T]}, S})
- end;
- _ ->
- error({value, {"illegal "++to_string(OID),[Id|T]}, S})
- end;
- _ ->
- validate_oid(true,S, OID, [Id|T], [])
- end;
-validate_objectidentifier1(S,OID,V) ->
- validate_oid(true,S,OID,V,[]).
-
-validate_oid(false, S, OID, V, Acc) ->
- error({value, {"illegal "++to_string(OID), V,Acc}, S});
-validate_oid(_,_, _, [], Acc) ->
- lists:reverse(Acc);
-validate_oid(_, S, OID, [Value|Vrest], Acc) when is_integer(Value) ->
- validate_oid(valid_objectid(OID,Value,Acc),S, OID, Vrest, [Value|Acc]);
-validate_oid(_, S, OID, [{'NamedNumber',_Name,Value}|Vrest], Acc)
+validate_oid(S, OidType, [], Acc) ->
+ Oid = lists:reverse(Acc),
+ validate_oid_path(S, OidType, Oid),
+ list_to_tuple(Oid);
+validate_oid(S, OidType, [Value|Vrest], Acc) when is_integer(Value) ->
+ validate_oid(S, OidType, Vrest, [Value|Acc]);
+validate_oid(S, OidType, [{'NamedNumber',_Name,Value}|Vrest], Acc)
when is_integer(Value) ->
- validate_oid(valid_objectid(OID,Value,Acc), S, OID, Vrest, [Value|Acc]);
-validate_oid(_, S, OID, [Id|Vrest], Acc)
- when is_record(Id,'Externalvaluereference') ->
- case catch get_referenced_type(S, Id) of
- {M,V} when is_record(V,valuedef) ->
- NewS = update_state(S,M),
- NewVal = case check_value(NewS, V) of
- #valuedef{checked=true,value=Value} ->
- fun(Int) when is_integer(Int) -> [Int];
- (L) when is_list(L) -> L;
- (T) when is_tuple(T) -> tuple_to_list(T)
- end (Value);
- _ ->
- error({value, {"illegal "++to_string(OID),
- [Id|Vrest],Acc}, S})
- end,
- case NewVal of
- List when is_list(List) ->
- validate_oid(valid_objectid(OID,NewVal,Acc), NewS,
- OID, Vrest,lists:reverse(NewVal)++Acc);
- _ ->
- NewVal
- end;
- _ ->
+ validate_oid(S, OidType, Vrest, [Value|Acc]);
+validate_oid(S, OidType, [#'Externalvaluereference'{}=Id|Vrest], Acc) ->
+ NeededOidType = case Acc of
+ [] -> o_id;
+ [_|_] -> rel_oid
+ end,
+ try get_oid_value(S, NeededOidType, true, Id) of
+ Val when is_integer(Val) ->
+ validate_oid(S, OidType, Vrest, [Val|Acc]);
+ Val when is_tuple(Val) ->
+ L = tuple_to_list(Val),
+ validate_oid(S, OidType, Vrest, lists:reverse(L, Acc))
+ catch
+ _:_ ->
case reserved_objectid(Id#'Externalvaluereference'.value, Acc) of
Value when is_integer(Value) ->
- validate_oid(valid_objectid(OID,Value,Acc),
- S, OID,Vrest, [Value|Acc]);
+ validate_oid(S, OidType,Vrest, [Value|Acc]);
false ->
- error({value, {"illegal "++to_string(OID),[Id,Vrest],Acc}, S})
+ asn1_error(S, {illegal_oid,OidType})
end
end;
-validate_oid(_, S, OID, [{#seqtag{module=Mod,val=Atom},Value}], [])
- when is_atom(Atom),is_integer(Value) ->
- %% this case when an OBJECT IDENTIFIER value has been parsed as a
- %% SEQUENCE value
- Rec = #'Externalvaluereference'{module=Mod,
- value=Atom},
- validate_objectidentifier1(S, OID, [Rec,Value]);
-validate_oid(_, S, OID, [{#seqtag{module=Mod,val=Atom},EVRef}], [])
- when is_atom(Atom),is_record(EVRef,'Externalvaluereference') ->
- %% this case when an OBJECT IDENTIFIER value has been parsed as a
- %% SEQUENCE value OTP-4354
- Rec = #'Externalvaluereference'{module=Mod,
- value=Atom},
- validate_objectidentifier1(S, OID, [Rec,EVRef]);
-validate_oid(_, S, OID, [#seqtag{module=Mod,val=Atom}|Rest], Acc)
- when is_atom(Atom) ->
- Rec = #'Externalvaluereference'{module=Mod,
- value=Atom},
- validate_oid(true,S, OID, [Rec|Rest],Acc);
-validate_oid(_, S, OID, V, Acc) ->
- error({value, {"illegal "++to_string(OID),V,Acc},S}).
-
-is_object_id(OID,S,ERef=#'Externaltypereference'{}) ->
- {_,OI} = get_referenced_type(S,ERef),
- is_object_id(OID,S,OI#typedef.typespec);
-is_object_id(o_id,_S,'OBJECT IDENTIFIER') ->
- true;
-is_object_id(rel_oid,_S,'RELATIVE-OID') ->
- true;
-is_object_id(_,_S,'INTEGER') ->
- true;
-is_object_id(OID,S,#type{def=Def}) ->
- is_object_id(OID,S,Def);
-is_object_id(_,_S,_) ->
- false.
-
-to_string(o_id) ->
- "OBJECT IDENTIFIER";
-to_string(rel_oid) ->
- "RELATIVE-OID".
+validate_oid(S, OidType, _V, _Acc) ->
+ asn1_error(S, {illegal_oid,OidType}).
+
+get_oid_type(S, OidType, #type{def=Def}) ->
+ get_oid_type(S, OidType, Def);
+get_oid_type(S, OidType, #'Externaltypereference'{}=Id) ->
+ {_,OI} = get_referenced_type(S, Id),
+ get_oid_type(S, OidType, OI#typedef.typespec);
+get_oid_type(_S, o_id, 'OBJECT IDENTIFIER'=T) ->
+ T;
+get_oid_type(_S, rel_oid, 'RELATIVE-OID'=T) ->
+ T;
+get_oid_type(_S, _, 'INTEGER'=T) ->
+ T;
+get_oid_type(S, OidType, _) ->
+ asn1_error(S, {illegal_oid,OidType}).
%% ITU-T Rec. X.680 Annex B - D
reserved_objectid('itu-t',[]) -> 0;
@@ -2380,7 +1903,6 @@ reserved_objectid('x',[0,0]) -> 24;
reserved_objectid('y',[0,0]) -> 25;
reserved_objectid('z',[0,0]) -> 26;
-
reserved_objectid(iso,[]) -> 1;
%% arcs below "iso", note that number 1 is not used
reserved_objectid('standard',[1]) -> 0;
@@ -2392,25 +1914,22 @@ reserved_objectid('joint-iso-ccitt',[]) -> 2;
reserved_objectid(_,_) -> false.
-valid_objectid(_OID,[],_Acc) ->
- true;
-valid_objectid(OID,[H|T],Acc) ->
- case valid_objectid(OID, H, Acc) of
- true ->
- valid_objectid(OID,T,[H|Acc]);
- _ ->
- false
- end;
-valid_objectid(o_id,I,[]) when I =:= 0; I =:= 1; I =:= 2 -> true;
-valid_objectid(o_id,_I,[]) -> false;
-valid_objectid(o_id,I,[0]) when I >= 0; I =< 4 -> true;
-valid_objectid(o_id,_I,[0]) -> false;
-valid_objectid(o_id,I,[1]) when I =:= 0; I =:= 2; I =:= 3 -> true;
-valid_objectid(o_id,_I,[1]) -> false;
-valid_objectid(o_id,_I,[2]) -> true;
-valid_objectid(_,_,_) -> true.
-
-convert_external(S=#state{type=Vtype}, Value) ->
+validate_oid_path(_, rel_oid, _) ->
+ ok;
+validate_oid_path(_, o_id, [0,I|_]) when 0 =< I, I =< 9 ->
+ ok;
+validate_oid_path(_, o_id, [1,I|_]) when 0 =< I, I =< 3 ->
+ ok;
+validate_oid_path(_, o_id, [2|_]) ->
+ ok;
+validate_oid_path(S, o_id=OidType, _) ->
+ asn1_error(S, {illegal_oid,OidType}).
+
+%%%
+%%% End of OBJECT IDENTIFIER/RELATIVE-OID validation.
+%%%
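
%% A standalone sketch (not part of the patch) of the root-arc rule that
%% validate_oid_path/3 above enforces for a full OBJECT IDENTIFIER
%% (RELATIVE-OID values are not restricted):
valid_oid_path_sketch([0,I|_]) when 0 =< I, I =< 9 -> true;  %% itu-t(0)
valid_oid_path_sketch([1,I|_]) when 0 =< I, I =< 3 -> true;  %% iso(1)
valid_oid_path_sketch([2|_]) -> true;                        %% joint-iso-itu-t(2)
valid_oid_path_sketch(_) -> false.

%% valid_oid_path_sketch([1,0,8571]) -> true     (e.g. {iso standard 8571})
%% valid_oid_path_sketch([3,1])      -> false    (3 is not a valid root arc)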
+
+convert_external(S, Vtype, Value) ->
case Vtype of
#type{tag=[{tag,'UNIVERSAL',8,'IMPLICIT',32}]} ->
%% this is an 'EXTERNAL' (or INSTANCE OF)
@@ -2435,7 +1954,7 @@ to_EXTERNAL1990(S, [{#seqtag{val=identification}=T,
to_EXTERNAL1990(S, Rest, [{T#seqtag{val='indirect-reference'},PCid},
{T#seqtag{val='direct-reference'},TrStx}]);
to_EXTERNAL1990(S, _) ->
- error({value,"illegal value in EXTERNAL type",S}).
+ asn1_error(S, illegal_external_value).
to_EXTERNAL1990(S, [V={#seqtag{val='data-value-descriptor'},_}|Rest], Acc) ->
to_EXTERNAL1990(S, Rest, [V|Acc]);
@@ -2443,7 +1962,7 @@ to_EXTERNAL1990(_S, [{#seqtag{val='data-value'}=T,Val}], Acc) ->
Encoding = {T#seqtag{val=encoding},{'CHOICE',{'octet-aligned',Val}}},
lists:reverse([Encoding|Acc]);
to_EXTERNAL1990(S, _, _) ->
- error({value,"illegal value in EXTERNAL type",S}).
+ asn1_error(S, illegal_external_value).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Functions to normalize the default values of SEQUENCE
@@ -2453,17 +1972,16 @@ normalize_value(_,_,mandatory,_) ->
mandatory;
normalize_value(_,_,'OPTIONAL',_) ->
'OPTIONAL';
-normalize_value(S0, Type, {'DEFAULT',Value}, NameList) ->
- S = S0#state{value=Value},
+normalize_value(S, Type, {'DEFAULT',Value}, NameList) ->
case catch get_canonic_type(S,Type,NameList) of
{'BOOLEAN',CType,_} ->
normalize_boolean(S,Value,CType);
{'INTEGER',CType,_} ->
- normalize_integer(S,Value,CType);
+ normalize_integer(S, Value, CType);
{'BIT STRING',CType,_} ->
normalize_bitstring(S,Value,CType);
- {'OCTET STRING',CType,_} ->
- normalize_octetstring(S0, Value, CType);
+ {'OCTET STRING',_,_} ->
+ normalize_octetstring(S, Value);
{'NULL',_CType,_} ->
%%normalize_null(Value);
'NULL';
@@ -2499,39 +2017,41 @@ normalize_value(S0, Type, {'DEFAULT',Value}, NameList) ->
normalize_value(S,Type,Val,NameList) ->
normalize_value(S,Type,{'DEFAULT',Val},NameList).
-normalize_boolean(S,{Name,Bool},CType) when is_atom(Name) ->
- normalize_boolean(S,Bool,CType);
normalize_boolean(_,true,_) ->
true;
normalize_boolean(_,false,_) ->
false;
normalize_boolean(S,Bool=#'Externalvaluereference'{},CType) ->
get_normalized_value(S,Bool,CType,fun normalize_boolean/3,[]);
-normalize_boolean(_,Other,_) ->
- throw({error,{asn1,{'invalid default value',Other}}}).
+normalize_boolean(S, _, _) ->
+ asn1_error(S, {illegal_value, "BOOLEAN"}).
-normalize_integer(_S,Int,_) when is_integer(Int) ->
+normalize_integer(_S, Int, _) when is_integer(Int) ->
Int;
-normalize_integer(_S,{Name,Int},_) when is_atom(Name),is_integer(Int) ->
- Int;
-normalize_integer(S,{Name,Int=#'Externalvaluereference'{}},
- Type) when is_atom(Name) ->
- normalize_integer(S,Int,Type);
-normalize_integer(S,Int=#'Externalvaluereference'{value=Name},Type) ->
- case Type of
- NNL when is_list(NNL) ->
- case lists:keysearch(Name,1,NNL) of
- {value,{Name,Val}} ->
+normalize_integer(S, #'Externalvaluereference'{value=Name}=Ref, NNL) ->
+ case lists:keyfind(Name, 1, NNL) of
+ {Name,Val} ->
+ Val;
+ false ->
+ try get_referenced_value(S, Ref) of
+ Val when is_integer(Val) ->
Val;
- false ->
- get_normalized_value(S,Int,Type,
- fun normalize_integer/3,[])
- end;
+ _ ->
+ asn1_error(S, illegal_integer_value)
+ catch
+ throw:_ ->
+ asn1_error(S, illegal_integer_value)
+ end
+ end;
+normalize_integer(S, {'ValueFromObject',{object,Obj},FieldNames}, _) ->
+ case extract_field(S, Obj, FieldNames) of
+ #valuedef{value=Val} when is_integer(Val) ->
+ Val;
_ ->
- get_normalized_value(S,Int,Type,fun normalize_integer/3,[])
+ asn1_error(S, illegal_integer_value)
end;
-normalize_integer(_,Int,_) ->
- exit({'Unknown INTEGER value',Int}).
+normalize_integer(S, _, _) ->
+ asn1_error(S, illegal_integer_value).
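
%% A standalone sketch (not part of the patch) of the named-number lookup
%% that normalize_integer/3 above tries first; e.g. for
%% INTEGER {red(0), green(1)} a DEFAULT of green normalizes to 1.
named_int_sketch(Name, NamedNumberList) ->
    case lists:keyfind(Name, 1, NamedNumberList) of
        {Name,Val} -> Val;
        false -> undefined
    end.

%% named_int_sketch(green, [{red,0},{green,1}]) -> 1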
%% normalize_bitstring(S, Value, Type) -> bitstring()
%% Convert a literal value for a BIT STRING to an Erlang bit string.
@@ -2543,36 +2063,34 @@ normalize_bitstring(S, Value, Type)->
{bstring,String} when is_list(String) ->
bstring_to_bitstring(String);
#'Externalvaluereference'{} ->
- get_normalized_value(S, Value, Type,
- fun normalize_bitstring/3, []);
- RecList when is_list(RecList) ->
- F = fun(#'Externalvaluereference'{value=Name}) ->
- case lists:keymember(Name, 1, Type) of
- true -> Name;
- false -> throw({error,false})
- end;
- (Name) when is_atom(Name) ->
- %% Already normalized.
- Name;
- (Other) ->
- throw({error,Other})
- end,
- try
- lists:map(F, RecList)
- catch
- throw:{error,Reason} ->
- asn1ct:warning("default value not "
- "compatible with type definition ~p~n",
- [Reason],S,
- "default value not "
- "compatible with type definition"),
- Value
+ Val = get_referenced_value(S, Value),
+ normalize_bitstring(S, Val, Type);
+ {'ValueFromObject',{object,Obj},FieldNames} ->
+ case extract_field(S, Obj, FieldNames) of
+ #valuedef{value=Val} ->
+ normalize_bitstring(S, Val, Type);
+ _ ->
+ asn1_error(S, {illegal_value, "BIT STRING"})
end;
+ RecList when is_list(RecList) ->
+ [normalize_bs_item(S, Item, Type) || Item <- RecList];
Bs when is_bitstring(Bs) ->
%% Already normalized.
- Bs
+ Bs;
+ _ ->
+ asn1_error(S, {illegal_value, "BIT STRING"})
end.
+normalize_bs_item(S, #'Externalvaluereference'{value=Name}, Type) ->
+ case lists:keymember(Name, 1, Type) of
+ true -> Name;
+ false -> asn1_error(S, {illegal_value, "BIT STRING"})
+ end;
+normalize_bs_item(_, Atom, _) when is_atom(Atom) ->
+ Atom;
+normalize_bs_item(S, _, _) ->
+ asn1_error(S, {illegal_value, "BIT STRING"}).
+
hstring_to_binary(L) ->
byte_align(hstring_to_bitstring(L)).
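
%% A standalone sketch (not part of the patch) of how 'B' and 'H' string
%% literals are assumed to be mapped to bit strings by
%% bstring_to_bitstring/1 and hstring_to_bitstring/1: one bit per binary
%% digit and four bits per hexadecimal digit, respectively.
%% (hex_to_int/1 is the module's own hex-digit helper.)
bstring_to_bits_sketch(Str) ->
    << <<(C - $0):1>> || C <- Str >>.        %% "1011" -> <<11:4>>

hstring_to_bits_sketch(Str) ->
    << <<(hex_to_int(C)):4>> || C <- Str >>. %% "1AFF" -> <<16#1AFF:16>>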
@@ -2600,29 +2118,35 @@ hex_to_int(D) when $A =< D, D =< $F -> D - ($A - 10).
%% {bstring,String} each element in String corresponds to one bit
%% {hstring,String} each element in String corresponds to one hexadecimal digit (half an octet)
%% #'Externalvaluereference'
-normalize_octetstring(S,Value,CType) ->
+normalize_octetstring(S, Value) ->
case Value of
{bstring,String} ->
bstring_to_binary(String);
{hstring,String} ->
hstring_to_binary(String);
- Rec when is_record(Rec,'Externalvaluereference') ->
- get_normalized_value(S,Value,CType,
- fun normalize_octetstring/3,[]);
- {Name,String} when is_atom(Name) ->
- normalize_octetstring(S,String,CType);
+ #'Externalvaluereference'{} ->
+ case get_referenced_value(S, Value) of
+ String when is_binary(String) ->
+ String;
+ Other ->
+ normalize_octetstring(S, Other)
+ end;
+ {'ValueFromObject',{object,Obj},FieldNames} ->
+ case extract_field(S, Obj, FieldNames) of
+ #valuedef{value=Val} when is_binary(Val) ->
+ Val;
+ _ ->
+ asn1_error(S, illegal_octet_string_value)
+ end;
_ ->
- Item = S#state.value,
- asn1_error(S, Item, illegal_octet_string_value)
+ asn1_error(S, illegal_octet_string_value)
end.
normalize_objectidentifier(S, Value) ->
- {ok,Val} = validate_objectidentifier(S, o_id, Value, []),
- Val.
+ validate_objectidentifier(S, o_id, Value).
-normalize_relative_oid(S,Value) ->
- {ok,Val} = validate_objectidentifier(S, rel_oid, Value, []),
- Val.
+normalize_relative_oid(S, Value) ->
+ validate_objectidentifier(S, rel_oid, Value).
normalize_objectdescriptor(Value) ->
Value.
@@ -2644,40 +2168,22 @@ lookup_enum_value(S, Id, NNL) when is_atom(Id) ->
{_,_}=Ret ->
Ret;
false ->
- asn1_error(S, S#state.value, {undefined,Id})
+ asn1_error(S, {undefined,Id})
end.
-normalize_choice(S,{'CHOICE',{C,V}},CType,NameList) when is_atom(C) ->
- case catch lists:keysearch(C,#'ComponentType'.name,CType) of
- {value,#'ComponentType'{typespec=CT,name=Name}} ->
- {C,normalize_value(S,CT,{'DEFAULT',V},
- [Name|NameList])};
- Other ->
- asn1ct:warning("Wrong format of type/value ~p/~p~n",[Other,V],S,
- "Wrong format of type/value"),
- {C,V}
+normalize_choice(S, {'CHOICE',{C,V}}, CType, NameList)
+ when is_atom(C) ->
+ case lists:keyfind(C, #'ComponentType'.name, CType) of
+ #'ComponentType'{typespec=CT,name=Name} ->
+ {C,normalize_value(S, CT, {'DEFAULT',V}, [Name|NameList])};
+ false ->
+ asn1_error(S, {illegal_id,C})
end;
-normalize_choice(S,{'DEFAULT',ValueList},CType,NameList) when is_list(ValueList) ->
- lists:map(fun(X)-> normalize_choice(S,X,CType,NameList) end, ValueList);
-normalize_choice(S,Val=#'Externalvaluereference'{},CType,NameList) ->
- {M,#valuedef{value=V}}=get_referenced_type(S,Val),
- normalize_choice(update_state(S,M),{'CHOICE',V},CType,NameList);
-% get_normalized_value(S,Val,CType,fun normalize_choice/4,[NameList]);
-normalize_choice(S,CV={Name,_ChoiceVal},CType,NameList)
+normalize_choice(S,CV={Name,_ChoiceVal},CType,NameList)
when is_atom(Name) ->
-% normalize_choice(S,ChoiceVal,CType,NameList).
normalize_choice(S,{'CHOICE',CV},CType,NameList);
-normalize_choice(_S,V,_CType,_NameList) ->
- exit({error,{bad_choice_value,V}}).
-
-%% normalize_choice(NameList,S,CVal = {'CHOICE',{_,_}},CType,_) ->
-%% normalize_choice(S,CVal,CType,NameList);
-%% normalize_choice(NameList,S,CVal={'DEFAULT',VL},CType,_) when is_list(VL)->
-%% normalize_choice(S,CVal,CType,NameList);
-%% normalize_choice(NameList,S,CV={Name,_CV},CType,_) when is_atom(Name)->
-%% normalize_choice(S,{'CHOICE',CV},CType,NameList);
-%% normalize_choice(_,_S,V,_,_) ->
-%% V.
+normalize_choice(S, V, _CType, _NameList) ->
+ asn1_error(S, {illegal_id, error_value(V)}).
normalize_sequence(S,Value,Components,NameList)
when is_tuple(Components) ->
@@ -2732,12 +2238,9 @@ normalized_record(SorS,S,Value,Components,NameList) ->
Value;
_ ->
NoComps = length(Components),
- case normalize_seq_or_set(SorS,S,Value,Components,NameList,[]) of
- ListOfVals when length(ListOfVals) == NoComps ->
- list_to_tuple([NewName|ListOfVals]);
- _ ->
- error({type,{illegal,default,value,Value},S})
- end
+ ListOfVals = normalize_seq_or_set(SorS,S,Value,Components,NameList,[]),
+ NoComps = length(ListOfVals), %% Assert
+ list_to_tuple([NewName|ListOfVals])
end.
is_record_normalized(S,Name,V = #'Externalvaluereference'{},NumComps) ->
case get_referenced_type(S,V) of
@@ -2750,10 +2253,11 @@ is_record_normalized(_S,Name,Value,NumComps) when is_tuple(Value) ->
is_record_normalized(_,_,_,_) ->
false.
-normalize_seq_or_set(SorS, S, [{#seqtag{val=Cname},V}|Vs],
+normalize_seq_or_set(SorS, S,
+ [{#seqtag{val=Cname},V}|Vs],
[#'ComponentType'{name=Cname,typespec=TS}|Cs],
NameList, Acc) ->
- NewNameList =
+ NewNameList =
case TS#type.def of
#'Externaltypereference'{type=TName} ->
[TName];
@@ -2761,24 +2265,26 @@ normalize_seq_or_set(SorS, S, [{#seqtag{val=Cname},V}|Vs],
end,
NVal = normalize_value(S,TS,{'DEFAULT',V},NewNameList),
normalize_seq_or_set(SorS,S,Vs,Cs,NameList,[NVal|Acc]);
-normalize_seq_or_set(SorS,S,Values=[{_Cname1,_V}|_Vs],
+normalize_seq_or_set(SorS, S,
+ Values=[{#seqtag{val=Cname0},_V}|_Vs],
[#'ComponentType'{prop='OPTIONAL'}|Cs],
- NameList,Acc) ->
+ NameList, Acc) ->
+ verify_valid_component(S, Cname0, Cs),
normalize_seq_or_set(SorS,S,Values,Cs,NameList,[asn1_NOVALUE|Acc]);
-normalize_seq_or_set(SorS,S,Values=[{_Cname1,_V}|_Vs],
- [#'ComponentType'{name=Cname2,typespec=TS,
- prop={'DEFAULT',Value}}|Cs],
- NameList,Acc) ->
- NewNameList =
+normalize_seq_or_set(SorS, S,
+ Values=[{#seqtag{val=Cname0},_V}|_Vs],
+ [#'ComponentType'{name=Cname,typespec=TS,
+ prop={'DEFAULT',Value}}|Cs],
+ NameList, Acc) ->
+ verify_valid_component(S, Cname0, Cs),
+ NewNameList =
case TS#type.def of
#'Externaltypereference'{type=TName} ->
[TName];
- _ -> [Cname2|NameList]
+ _ -> [Cname|NameList]
end,
NVal = normalize_value(S,TS,{'DEFAULT',Value},NewNameList),
normalize_seq_or_set(SorS,S,Values,Cs,NameList,[NVal|Acc]);
-normalize_seq_or_set(_SorS,_S,[],[],_,Acc) ->
- lists:reverse(Acc);
%% If the default value is {}, the ComponentTypes in the SEQUENCE are marked
%% DEFAULT or OPTIONAL (or the type is defined as SEQUENCE {}, which is
%% handled by the previous case).
@@ -2801,9 +2307,23 @@ normalize_seq_or_set(SorS,S,Value=#'Externalvaluereference'{},
Cs,NameList,Acc) ->
get_normalized_value(S,Value,Cs,fun normalize_seq_or_set/6,
[SorS,NameList,Acc]);
-normalize_seq_or_set(_SorS,S,V,_,_,_) ->
- error({type,{illegal,default,value,V},S}).
-
+normalize_seq_or_set(_SorS, _S, [], [], _, Acc) ->
+ lists:reverse(Acc);
+normalize_seq_or_set(_SorS, S, V, Cs, _, _) ->
+ case V of
+ [{#seqtag{val=Name},_}|_] ->
+ asn1_error(S, {illegal_id,error_value(Name)});
+ [] ->
+ [#'ComponentType'{name=Name}|_] = Cs,
+ asn1_error(S, {missing_id,error_value(Name)})
+ end.
+
+verify_valid_component(S, Name, Cs) ->
+ case lists:keyfind(Name, #'ComponentType'.name, Cs) of
+ false -> asn1_error(S, {illegal_id,error_value(Name)});
+ #'ComponentType'{} -> ok
+ end.
+
normalize_seqof(S,Value,Type,NameList) ->
normalize_s_of('SEQUENCE OF',S,Value,Type,NameList).
@@ -2859,10 +2379,7 @@ normalize_restrictedstring(_S,CString,_) when is_list(CString) ->
%% definedvalue case or argument in a parameterized type
normalize_restrictedstring(S,ERef,CType) when is_record(ERef,'Externalvaluereference') ->
get_normalized_value(S,ERef,CType,
- fun normalize_restrictedstring/3,[]);
-%%
-normalize_restrictedstring(S,{Name,Val},CType) when is_atom(Name) ->
- normalize_restrictedstring(S,Val,CType).
+ fun normalize_restrictedstring/3,[]).
normalize_objectclassfieldvalue(S,{opentypefieldvalue,Type,Value},NameList) ->
%% An open type has per definition no type. Thus should the type
@@ -2910,6 +2427,8 @@ call_Func(S,Val,Type,Func,ArgList) ->
get_canonic_type(S,Type,NameList) ->
{InnerType,NewType,NewNameList} =
case Type#type.def of
+ 'INTEGER'=Name ->
+ {Name,[],NameList};
Name when is_atom(Name) ->
{Name,Type,NameList};
Ref when is_record(Ref,'Externaltypereference') ->
@@ -2964,8 +2483,8 @@ check_formal_parameter(_, {_,_}) ->
ok;
check_formal_parameter(_, #'Externaltypereference'{}) ->
ok;
-check_formal_parameter(S, #'Externalvaluereference'{value=Name}=Ref) ->
- asn1_error(S, Ref, {illegal_typereference,Name}).
+check_formal_parameter(S, #'Externalvaluereference'{value=Name}) ->
+ asn1_error(S, {illegal_typereference,Name}).
% check_type(S,Type,ObjSpec={{objectclassname,_},_}) ->
% check_class(S,ObjSpec);
@@ -2977,7 +2496,7 @@ check_type(_S,Type,Ts) when is_record(Type,typedef),
Ts;
check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) ->
{Def,Tag,Constr,IsInlined} =
- case match_parameters(S,Ts#type.def,S#state.parameters) of
+ case match_parameter(S, Ts#type.def) of
#type{tag=PTag,constraint=_Ctmp,def=Dtmp,inlined=Inl} ->
{Dtmp,merge_tags(Ts#type.tag,PTag),Ts#type.constraint,Inl};
#typedef{typespec=#type{tag=PTag,def=Dtmp,inlined=Inl}} ->
@@ -2989,16 +2508,16 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) ->
inlined=IsInlined},
TestFun =
fun(Tref) ->
- MaybeChoice = get_non_typedef(S, Tref),
+ {_, MaybeChoice} = get_referenced_type(S, Tref, true),
case catch((MaybeChoice#typedef.typespec)#type.def) of
{'CHOICE',_} ->
- maybe_illicit_implicit_tag(choice,Tag);
+ maybe_illicit_implicit_tag(S, choice, Tag);
'ANY' ->
- maybe_illicit_implicit_tag(open_type,Tag);
+ maybe_illicit_implicit_tag(S, open_type, Tag);
'ANY DEFINED BY' ->
- maybe_illicit_implicit_tag(open_type,Tag);
+ maybe_illicit_implicit_tag(S, open_type, Tag);
'ASN1_OPEN_TYPE' ->
- maybe_illicit_implicit_tag(open_type,Tag);
+ maybe_illicit_implicit_tag(S, open_type, Tag);
_ ->
Tag
end
@@ -3007,7 +2526,7 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) ->
case Def of
Ext when is_record(Ext,'Externaltypereference') ->
{RefMod,RefTypeDef,IsParamDef} =
- case get_referenced_type(S,Ext) of
+ case get_referenced_type(S, Ext) of
{undefined,TmpTDef} -> %% A parameter
{get(top_module),TmpTDef,true};
{TmpRefMod,TmpRefDef} ->
@@ -3031,7 +2550,6 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) ->
NewS = S#state{mname=RefMod,
module=load_asn1_module(S,RefMod),
tname=get_datastr_name(NewRefTypeDef1),
- type=NewRefTypeDef1,
abscomppath=[],recordtopname=[]},
RefType1 =
check_type(NewS,RefTypeDef,RefTypeDef#typedef.typespec),
@@ -3051,18 +2569,17 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) ->
Key);
_ -> ok
end,
+ Pos = Ext#'Externaltypereference'.pos,
{RefType1,#'Externaltypereference'{module=RefMod,
+ pos=Pos,
type=TmpName}}
end,
case asn1ct_gen:prim_bif(asn1ct_gen:get_inner(RefType#type.def)) of
true ->
%% Here we expand to a built in type and inline it
- NewS2 = S#state{type=#typedef{typespec=RefType}},
- NewC =
- constraint_merge(NewS2,
- check_constraints(NewS2,Constr)++
- RefType#type.constraint),
+ NewC = check_constraints(S, RefType, Constr ++
+ RefType#type.constraint),
TempNewDef#newt{
type = RefType#type.def,
tag = merge_tags(Ct,RefType#type.tag),
@@ -3073,19 +2590,13 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) ->
NewExt = ExtRef#'Externaltypereference'{module=merged_mod(S,RefMod,Ext)},
TempNewDef#newt{
type = check_externaltypereference(S,NewExt),
- tag = case S#state.erule of
- ber ->
- merge_tags(Ct,RefType#type.tag);
- _ ->
- Ct
- end
- }
+ tag = merge_tags(Ct,RefType#type.tag)}
end;
'ANY' ->
- Ct=maybe_illicit_implicit_tag(open_type,Tag),
+ Ct = maybe_illicit_implicit_tag(S, open_type, Tag),
TempNewDef#newt{type='ASN1_OPEN_TYPE',tag=Ct};
{'ANY_DEFINED_BY',_} ->
- Ct=maybe_illicit_implicit_tag(open_type,Tag),
+ Ct = maybe_illicit_implicit_tag(S, open_type, Tag),
TempNewDef#newt{type='ASN1_OPEN_TYPE',tag=Ct};
'INTEGER' ->
TempNewDef#newt{tag=
@@ -3132,7 +2643,7 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) ->
{'ENUMERATED',NamedNumberList} ->
TempNewDef#newt{type=
{'ENUMERATED',
- check_enumerated(S,NamedNumberList,Constr)},
+ check_enumerated(S, NamedNumberList)},
tag=
merge_tags(Tag,?TAG_PRIMITIVE(?N_ENUMERATED)),
constraint=[]};
@@ -3235,7 +2746,7 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) ->
tag=
merge_tags(Tag,?TAG_CONSTRUCTED(?N_SEQUENCE))};
{'CHOICE',Components} ->
- Ct = maybe_illicit_implicit_tag(choice,Tag),
+ Ct = maybe_illicit_implicit_tag(S, choice, Tag),
TempNewDef#newt{type={'CHOICE',check_choice(S,Type,Components)},tag=Ct};
Set when is_record(Set,'SET') ->
RecordName=
@@ -3258,12 +2769,6 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) ->
TempNewDef#newt{type={'SET OF',check_setof(S,Type,Components)},
tag=
merge_tags(Tag,?TAG_CONSTRUCTED(?N_SET))};
- %% This is a temporary hack until the full Information Obj Spec
- %% in X.681 is supported
- {#'Externaltypereference'{type='TYPE-IDENTIFIER'},
- [{typefieldreference,_,'Type'}]} ->
- Ct=maybe_illicit_implicit_tag(open_type,Tag),
- TempNewDef#newt{type='ASN1_OPEN_TYPE',tag=Ct};
{pt,Ptype,ParaList} ->
%% Ptype might be a parameterized - type, object set or
@@ -3271,18 +2776,18 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) ->
%% calling function.
{_RefMod,Ptypedef} = get_referenced_type(S,Ptype),
notify_if_not_ptype(S,Ptypedef),
- NewParaList =
- [match_parameters(S,TmpParam,S#state.parameters)||
- TmpParam <- ParaList],
+ NewParaList = match_parameters(S, ParaList),
Instance = instantiate_ptype(S,Ptypedef,NewParaList),
TempNewDef#newt{type=Instance#type.def,
tag=merge_tags(Tag,Instance#type.tag),
constraint=Instance#type.constraint,
inlined=yes};
- OCFT=#'ObjectClassFieldType'{classname=ClRef} ->
+ #'ObjectClassFieldType'{classname=ClRef0}=OCFT0 ->
	    %% this case occurs in a SEQUENCE when
	    %% the type of the component is an ObjectClassFieldType
+ ClRef = match_parameter(S, ClRef0),
+ OCFT = OCFT0#'ObjectClassFieldType'{classname=ClRef},
ClassSpec = check_class(S,ClRef),
NewTypeDef =
maybe_open_type(S,ClassSpec,
@@ -3292,16 +2797,18 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) ->
Ct =
case is_open_type(NewTypeDef) of
true ->
- maybe_illicit_implicit_tag(open_type,MergedTag);
+ maybe_illicit_implicit_tag(S, open_type, MergedTag);
_ ->
MergedTag
end,
case TopName of
[] when Type#typedef.name =/= undefined ->
%% This is a top-level type.
- #type{def=Simplified} =
- simplify_type(#type{def=NewTypeDef}),
- TempNewDef#newt{type=Simplified,tag=Ct};
+ #type{constraint=C,def=Simplified} =
+ simplify_type(#type{def=NewTypeDef,
+ constraint=Constr}),
+ TempNewDef#newt{type=Simplified,tag=Ct,
+ constraint=C};
_ ->
TempNewDef#newt{type=NewTypeDef,tag=Ct}
end;
@@ -3311,33 +2818,21 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) ->
TempNewDef#newt{tag=merge_tags(Tag,CheckedT#type.tag),
type=CheckedT#type.def};
- {valueset,Vtype} ->
- TempNewDef#newt{type={valueset,check_type(S,Type,Vtype)}};
{'SelectionType',Name,T} ->
CheckedT = check_selectiontype(S,Name,T),
TempNewDef#newt{tag=merge_tags(Tag,CheckedT#type.tag),
type=CheckedT#type.def};
- Other ->
- exit({'cant check' ,Other})
+ 'ASN1_OPEN_TYPE' ->
+ TempNewDef
end,
#newt{type=TDef,tag=NewTags,constraint=NewConstr,inlined=Inlined} = NewDef,
Ts#type{def=TDef,
inlined=Inlined,
- constraint=check_constraints(S, NewConstr),
+ constraint=check_constraints(S, #type{def=TDef}, NewConstr),
tag=lists:map(fun(#tag{type={default,TTx}}=TempTag) ->
TempTag#tag{type=TTx};
(Other) -> Other
- end, NewTags)};
-check_type(_S,Type,Ts) ->
- exit({error,{asn1,internal_error,Type,Ts}}).
-
-get_non_typedef(S, Tref0) ->
- case get_referenced_type(S, Tref0) of
- {_,#typedef{typespec=#type{def=#'Externaltypereference'{}=Tref}}} ->
- get_non_typedef(S, Tref);
- {_,Type} ->
- Type
- end.
+ end, NewTags)}.
%%
@@ -3353,10 +2848,11 @@ simplify_comp(#'ComponentType'{typespec=Type0}=C) ->
C#'ComponentType'{typespec=Type};
simplify_comp(Other) -> Other.
-simplify_type(#type{tag=Tag,def=Inner}=T) ->
+simplify_type(#type{tag=Tag,def=Inner,constraint=Constr0}=T) ->
case Inner of
- #'ObjectClassFieldType'{type={fixedtypevaluefield,_,Type}} ->
- Type#type{tag=Tag};
+ #'ObjectClassFieldType'{type={fixedtypevaluefield,_,Type}}=OCFT ->
+ Constr = [{ocft,OCFT}|Type#type.constraint++Constr0],
+ Type#type{tag=Tag,constraint=Constr};
_ ->
T
end.
@@ -3389,29 +2885,22 @@ get_innertag(_S,#'ObjectClassFieldType'{type=Type}) ->
_ -> []
end.
-get_type_from_object(S,Object,TypeField)
- when is_record(Object,'Externaltypereference');
- is_record(Object,'Externalvaluereference') ->
- {_,ObjectDef} = get_referenced_type(S,Object),
- ObjSpec = check_object(S,ObjectDef,ObjectDef#typedef.typespec),
- get_fieldname_element(S,ObjectDef#typedef{typespec=ObjSpec},TypeField).
-
%% get_class_def(S, Type) -> #classdef{} | 'none'.
get_class_def(S, #typedef{typespec=#type{def=#'Externaltypereference'{}=Eref}}) ->
- {_,NextDef} = get_referenced_type(S, Eref),
+ {_,NextDef} = get_referenced_type(S, Eref, true),
get_class_def(S, NextDef);
get_class_def(S, #'Externaltypereference'{}=Eref) ->
- {_,NextDef} = get_referenced_type(S, Eref),
+ {_,NextDef} = get_referenced_type(S, Eref, true),
get_class_def(S, NextDef);
get_class_def(_S, #classdef{}=CD) ->
CD;
get_class_def(_S, _) ->
none.
-maybe_illicit_implicit_tag(Kind,Tag) ->
+maybe_illicit_implicit_tag(S, Kind, Tag) ->
case Tag of
[#tag{type='IMPLICIT'}|_T] ->
- throw({error,{asn1,{implicit_tag_before,Kind}}});
+ asn1_error(S, {implicit_tag_before,Kind});
[ChTag = #tag{type={default,_}}|T] ->
case Kind of
open_type ->
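
%% For reference (made-up example, not from the patch): a definition such as
%%   T ::= [0] IMPLICIT CHOICE { a INTEGER, b BOOLEAN }
%% is rejected above with {implicit_tag_before,choice}, since a CHOICE or an
%% open type has no single underlying tag for IMPLICIT to replace.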
@@ -3438,19 +2927,24 @@ merged_mod(S,RefMod,Ext) ->
%% any UNIQUE field, so that a component relation constraint cannot specify
%% the type of a typefield, return 'ASN1_OPEN_TYPE'.
%%
-maybe_open_type(S,ClassSpec=#objectclass{fields=Fs},
- OCFT=#'ObjectClassFieldType'{fieldname=FieldRefList},
+maybe_open_type(_, _, #'ObjectClassFieldType'{fieldname={_,_}}=OCFT, _) ->
+ %% Already converted.
+ OCFT;
+maybe_open_type(S, #objectclass{fields=Fs}=ClassSpec,
+ #'ObjectClassFieldType'{fieldname=FieldRefList}=OCFT,
Constr) ->
- Type = get_ObjectClassFieldType(S,Fs,FieldRefList),
- FieldNames=get_referenced_fieldname(FieldRefList),
- case last_fieldname(FieldRefList) of
+ Type = get_OCFType(S, Fs, FieldRefList),
+ FieldNames = get_referenced_fieldname(FieldRefList),
+ case lists:last(FieldRefList) of
{valuefieldreference,_} ->
OCFT#'ObjectClassFieldType'{fieldname=FieldNames,
type=Type};
{typefieldreference,_} ->
- case {catch get_unique_fieldname(S,#classdef{typespec=ClassSpec}),
- asn1ct_gen:get_constraint(Constr,componentrelation)}of
- {Tuple,_} when tuple_size(Tuple) =:= 3 ->
+ %% Note: The constraints have not been checked yet,
+ %% so we must use a special lookup routine.
+ case {get_unique_fieldname(S, #classdef{typespec=ClassSpec}),
+ get_componentrelation(Constr)} of
+ {no_unique,_} ->
OCFT#'ObjectClassFieldType'{fieldname=FieldNames,
type='ASN1_OPEN_TYPE'};
{_,no} ->
@@ -3462,16 +2956,12 @@ maybe_open_type(S,ClassSpec=#objectclass{fields=Fs},
end
end.
-last_fieldname(FieldRefList) when is_list(FieldRefList) ->
- lists:last(FieldRefList);
-last_fieldname({FieldName,_}) when is_atom(FieldName) ->
- [A|_] = atom_to_list(FieldName),
- case is_lowercase(A) of
- true ->
- {valuefieldreference,FieldName};
- _ ->
- {typefieldreference,FieldName}
- end.
+get_componentrelation([{element_set,{componentrelation,_,_}=Cr,none}|_]) ->
+ Cr;
+get_componentrelation([_|T]) ->
+ get_componentrelation(T);
+get_componentrelation([]) ->
+ no.
is_open_type(#'ObjectClassFieldType'{type='ASN1_OPEN_TYPE'}) ->
true;
@@ -3510,35 +3000,19 @@ notify_if_not_ptype(S,#pobjectsetdef{class=Cl}) ->
_ ->
throw(pobjectsetdef)
end;
-notify_if_not_ptype(_S,PT) ->
- throw({error,{"supposed to be a parameterized type",PT}}).
-% fix me
+notify_if_not_ptype(S, PT) ->
+ asn1_error(S, {param_bad_type, error_value(PT)}).
+
instantiate_ptype(S,Ptypedef,ParaList) ->
#ptypedef{args=Args,typespec=Type} = Ptypedef,
NewType = check_ptype(S,Ptypedef,Type#type{inlined=yes}),
MatchedArgs = match_args(S,Args, ParaList, []),
OldArgs = S#state.parameters,
- NewS = S#state{type=NewType,parameters=MatchedArgs++OldArgs,abscomppath=[]},
-%% NewS = S#state{type=NewType,parameters=MatchedArgs,abscomppath=[]},
+ NewS = S#state{parameters=MatchedArgs++OldArgs,abscomppath=[]},
check_type(NewS, Ptypedef#ptypedef{typespec=NewType}, NewType).
-get_datastr_name(#typedef{name=N}) ->
- N;
-get_datastr_name(#classdef{name=N}) ->
- N;
-get_datastr_name(#valuedef{name=N}) ->
- N;
-get_datastr_name(#ptypedef{name=N}) ->
- N;
-get_datastr_name(#pvaluedef{name=N}) ->
- N;
-get_datastr_name(#pvaluesetdef{name=N}) ->
- N;
-get_datastr_name(#pobjectdef{name=N}) ->
- N;
-get_datastr_name(#pobjectsetdef{name=N}) ->
- N.
-
+get_datastr_name(Type) ->
+ asn1ct:get_name_of_def(Type).
get_pt_args(#ptypedef{args=Args}) ->
Args;
@@ -3606,8 +3080,8 @@ match_args(S,FA = [FormArg|Ft], AA = [ActArg|At], Acc) ->
end;
match_args(_S,[], [], Acc) ->
lists:reverse(Acc);
-match_args(_,_, _, _) ->
- throw({error,{asn1,{wrong_number_of_arguments}}}).
+match_args(S, _, _, _) ->
+ asn1_error(S, param_wrong_number_of_arguments).
%%%%%%%%%%%%%%%%%
%% categorize_arg(S,FormalArg,ActualArg) -> {FormalArg,CatgorizedActualArg}
@@ -3652,11 +3126,6 @@ parameter_name_style(#'Externaltypereference'{}) ->
parameter_name_style(#'Externalvaluereference'{}) ->
beginning_lowercase.
-is_lowercase(X) when X >= $A,X =< $W ->
- false;
-is_lowercase(_) ->
- true.
-
%% categorize(Parameter) -> CategorizedParameter
%% If Parameter has an abstract syntax of another category than
%% Category, transform it to a known syntax.
@@ -3705,725 +3174,503 @@ parse_objectset(Set) ->
Set.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% check_constraints/2
-%%
-check_constraints(S,C) when is_list(C) ->
- check_constraints(S, C, []).
-
-resolv_tuple_or_list(S,List) when is_list(List) ->
- lists:map(fun(X)->resolv_value(S,X) end, List);
-resolv_tuple_or_list(S,{Lb,Ub}) ->
- {resolv_value(S,Lb),resolv_value(S,Ub)}.
-
-%%%-----------------------------------------
-%% If the constraint value is a defined value the valuename
-%% is replaced by the actual value
%%
-resolv_value(S,Val) ->
- Id = match_parameters(S,Val, S#state.parameters),
- resolv_value1(S,Id).
+%% Check and simplify constraints.
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-resolv_value1(S, ERef = #'Externalvaluereference'{value=Name}) ->
- case catch resolve_namednumber(S, S#state.type, Name) of
- V when is_integer(V) ->
- V;
- _ ->
- case get_referenced_type(S,ERef) of
- {Err,_Reason} when Err == error; Err == 'EXIT' ->
- throw({error,{asn1,{undefined_type_or_value,
- Name}}});
- {_M,VDef} ->
- resolv_value1(S,VDef)
- end
- end;
-resolv_value1(S, {gt,V}) ->
- case resolv_value1(S, V) of
- Int when is_integer(Int) ->
- Int + 1;
- Other ->
- throw({error,{asn1,{not_integer_value,Other}}})
- end;
-resolv_value1(S, {lt,V}) ->
- case resolv_value1(S, V) of
- Int when is_integer(Int) ->
- Int - 1;
- Other ->
- throw({error,{asn1,{not_integer_value,Other}}})
- end;
-resolv_value1(S,{'ValueFromObject',{object,Object},[{valuefieldreference,
- FieldName}]}) ->
- %% FieldName can hold either a fixed-type value or a variable-type value
- %% Object is a DefinedObject, i.e. a #'Externaltypereference'
- resolve_value_from_object(S,Object,FieldName);
-resolv_value1(_,#valuedef{checked=true,value=V}) ->
- V;
-resolv_value1(S,#valuedef{type=_T,
- value={'ValueFromObject',{object,Object},
- [{valuefieldreference,
- FieldName}]}}) ->
- resolve_value_from_object(S,Object,FieldName);
-resolv_value1(S,VDef = #valuedef{}) ->
- #valuedef{value=Val} = check_value(S,VDef),
- Val;
-resolv_value1(_,V) ->
- V.
-resolve_value_from_object(S,Object,FieldName) ->
- {_,ObjTDef} = get_referenced_type(S,Object),
- TS = check_object(S,ObjTDef,ObjTDef#typedef.typespec),
- {_,_,Components} = TS#'Object'.def,
- case lists:keysearch(FieldName,1,Components) of
- {value,{_,#valuedef{value=Val}}} ->
- Val;
- _ ->
- error({value,"illegal value in constraint",S})
+check_constraints(_S, _HostType, []) ->
+ [];
+check_constraints(S, HostType0, [_|_]=Cs0) ->
+ HostType = get_real_host_type(HostType0, Cs0),
+ Cs1 = top_level_intersections(Cs0),
+ Cs2 = [coalesce_constraints(C) || C <- Cs1],
+ {_,Cs3} = filter_extensions(Cs2),
+ Cs = simplify_element_sets(S, HostType, Cs3),
+ finish_constraints(Cs).
+
+get_real_host_type(HostType, Cs) ->
+ case lists:keyfind(ocft, 1, Cs) of
+ false -> HostType;
+ {_,OCFT} -> HostType#type{def=OCFT}
end.
+top_level_intersections([{element_set,{intersection,_,_}=C,none}]) ->
+ top_level_intersections_1(C);
+top_level_intersections(Cs) ->
+ Cs.
+
+top_level_intersections_1({intersection,A,B}) ->
+ [{element_set,A,none}|top_level_intersections_1(B)];
+top_level_intersections_1(Other) ->
+ [{element_set,Other,none}].
+
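+%% Illustrative sketch (hypothetical helper, assuming the two functions
+%% above): a single top-level intersection such as (A ^ B ^ C) is split
+%% into one element set per operand; the atoms a, b and c stand for
+%% already parsed element sets.
+top_level_intersections_example() ->
+    [{element_set,a,none},{element_set,b,none},{element_set,c,none}] =
+        top_level_intersections(
+          [{element_set,{intersection,a,{intersection,b,c}},none}]),
+    ok.
+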
+coalesce_constraints({element_set,
+ {Tag,{element_set,A,_}},
+ {Tag,{element_set,B,_}}}) ->
+    %% (SIZE (C1), ..., SIZE (C2)) => (SIZE (C1, ..., C2))
+ {element_set,{Tag,{element_set,A,B}},none};
+coalesce_constraints(Other) ->
+ Other.
+
+%% Remove all outermost extensions except the last.
+
+filter_extensions([H0|T0]) ->
+ case filter_extensions(T0) of
+ {true,T} ->
+ H = remove_extension(H0),
+ {true,[H|T]};
+ {false,T} ->
+ {any_extension(H0),[H0|T]}
+ end;
+filter_extensions([]) ->
+ {false,[]}.
-resolve_namednumber(S,#typedef{typespec=Type},Name) ->
- case Type#type.def of
- {'ENUMERATED',NameList} ->
- resolve_namednumber_1(S, Name, NameList, Type);
- {'INTEGER',NameList} ->
- resolve_namednumber_1(S, Name, NameList, Type);
+remove_extension({element_set,Root,_}) ->
+ {element_set,remove_extension(Root),none};
+remove_extension(Tuple) when is_tuple(Tuple) ->
+ L = [remove_extension(El) || El <- tuple_to_list(Tuple)],
+ list_to_tuple(L);
+remove_extension(Other) -> Other.
+
+any_extension({element_set,_,Ext}) when Ext =/= none ->
+ true;
+any_extension(Tuple) when is_tuple(Tuple) ->
+ any_extension_tuple(1, Tuple);
+any_extension(_) -> false.
+
+any_extension_tuple(I, T) when I =< tuple_size(T) ->
+ any_extension(element(I, T)) orelse any_extension_tuple(I+1, T);
+any_extension_tuple(_, _) -> false.
+
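+%% Minimal sketch (hypothetical helper) of the rule stated above: the
+%% extension part of every constraint except the last one is dropped;
+%% a, x, b and y stand for arbitrary element sets.
+filter_extensions_example() ->
+    {true,[{element_set,a,none},{element_set,b,y}]} =
+        filter_extensions([{element_set,a,x},{element_set,b,y}]),
+    ok.
+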
+simplify_element_sets(S, HostType, [{element_set,R0,E0}|T0]) ->
+ R1 = simplify_element_set(S, HostType, R0),
+ E1 = simplify_element_set(S, HostType, E0),
+ case simplify_element_sets(S, HostType, T0) of
+ [{element_set,R2,E2}] ->
+ [{element_set,cs_intersection(S, R1, R2),
+ cs_intersection(S, E1, E2)}];
+ L when is_list(L) ->
+ [{element_set,R1,E1}|L]
+ end;
+simplify_element_sets(S, HostType, [H|T]) ->
+ [H|simplify_element_sets(S, HostType, T)];
+simplify_element_sets(_, _, []) ->
+ [].
+
+simplify_element_set(_S, _HostType, empty) ->
+ {set,[]};
+simplify_element_set(S, HostType, {'SingleValue',Vs0}) when is_list(Vs0) ->
+ Vs1 = [resolve_value(S, HostType, V) || V <- Vs0],
+ Vs = make_constr_set_vs(Vs1),
+ simplify_element_set(S, HostType, Vs);
+simplify_element_set(S, HostType, {'SingleValue',V0}) ->
+ V1 = resolve_value(S, HostType, V0),
+ V = {set,[{range,V1,V1}]},
+ simplify_element_set(S, HostType, V);
+simplify_element_set(S, HostType, {'ValueRange',{Lb0,Ub0}}) ->
+ Lb = resolve_value(S, HostType, Lb0),
+ Ub = resolve_value(S, HostType, Ub0),
+ V = make_constr_set(S, Lb, Ub),
+ simplify_element_set(S, HostType, V);
+simplify_element_set(S, HostType, {'ALL-EXCEPT',Set0}) ->
+ Set = simplify_element_set(S, HostType, Set0),
+ {'ALL-EXCEPT',Set};
+simplify_element_set(S, HostType, {intersection,A0,B0}) ->
+ A = simplify_element_set(S, HostType, A0),
+ B = simplify_element_set(S, HostType, B0),
+ cs_intersection(S, A, B);
+simplify_element_set(S, HostType, {union,A0,B0}) ->
+ A = simplify_element_set(S, HostType, A0),
+ B = simplify_element_set(S, HostType, B0),
+ cs_union(S, A, B);
+simplify_element_set(S, HostType, {simpletable,{element_set,Type,_}}) ->
+ check_simpletable(S, HostType, Type);
+simplify_element_set(S, _, {componentrelation,R,Id}) ->
+ check_componentrelation(S, R, Id);
+simplify_element_set(S, HostType, {Tag,{element_set,_,_}=El0}) ->
+ [El1] = simplify_element_sets(S, HostType, [El0]),
+ {Tag,El1};
+simplify_element_set(S, HostType, #type{}=Type) ->
+ simplify_element_set_type(S, HostType, Type);
+simplify_element_set(_, _, C) ->
+ C.
+
+simplify_element_set_type(S, HostType, #type{def=Def0}=Type0) ->
+ #'Externaltypereference'{} = Def0, %Assertion.
+ case get_referenced_type(S, Def0) of
+ {_,#valuedef{checked=false,value={valueset,Vs0}}} ->
+ [Vs1] = simplify_element_sets(S, HostType, [Vs0]),
+ case Vs1 of
+ {element_set,Set,none} ->
+ Set;
+ {element_set,Set,{set,[]}} ->
+ Set
+ end;
+ {_,{valueset,#type{def=#'Externaltypereference'{}}=Type}} ->
+ simplify_element_set_type(S, HostType, Type);
_ ->
- not_enumerated
+ case HostType of
+ #type{def=#'ObjectClassFieldType'{}} ->
+ %% Open type.
+ #type{def=Def} = check_type(S, HostType, Type0),
+ Def;
+ _ ->
+ #type{constraint=Cs} = check_type(S, HostType, Type0),
+ C = convert_back(Cs),
+ simplify_element_set(S, HostType, C)
+ end
end.
-resolve_namednumber_1(S, Name, NameList, Type) ->
- NamedNumberList = check_enumerated(S, NameList, Type#type.constraint),
- {_,N} = lookup_enum_value(S, Name, NamedNumberList),
- N.
-
-check_constraints(S,[{'ContainedSubtype',Type} | Rest], Acc) ->
- {RefMod,CTDef} = get_referenced_type(S,Type#type.def),
- NewS = S#state{module=load_asn1_module(S,RefMod),mname=RefMod,
- type=CTDef,tname=get_datastr_name(CTDef)},
- CType = check_type(NewS,S#state.tname,CTDef#typedef.typespec),
- check_constraints(S,Rest,CType#type.constraint ++ Acc);
-check_constraints(S,[C | Rest], Acc) ->
- check_constraints(S,Rest,[check_constraint(S,C) | Acc]);
-check_constraints(S,[],Acc) ->
- constraint_merge(S,Acc).
-
-
-range_check(F={FixV,FixV}) ->
-% FixV;
- F;
-range_check(VR={Lb,Ub}) when Lb < Ub ->
- VR;
-range_check(Err={_,_}) ->
- throw({error,{asn1,{illegal_size_constraint,Err}}});
-range_check(Value) ->
- Value.
-
-check_constraint(S,Ext) when is_record(Ext,'Externaltypereference') ->
- check_externaltypereference(S,Ext);
-
-
-check_constraint(S,{'SizeConstraint',{Lb,Ub}})
- when is_list(Lb); tuple_size(Lb) =:= 2 ->
- NewLb = range_check(resolv_tuple_or_list(S,Lb)),
- NewUb = range_check(resolv_tuple_or_list(S,Ub)),
- {'SizeConstraint',{NewLb,NewUb}};
-check_constraint(S,{'SizeConstraint',{Lb,Ub}}) ->
- case {resolv_value(S,Lb),resolv_value(S,Ub)} of
- {FixV,FixV} ->
- {'SizeConstraint',FixV};
- {Low,High} when Low < High ->
- {'SizeConstraint',{Low,High}};
- Err ->
- throw({error,{asn1,{illegal_size_constraint,Err}}})
- end;
-check_constraint(S,{'SizeConstraint',Lb}) ->
- {'SizeConstraint',resolv_value(S,Lb)};
+convert_back([H1,H2|T]) ->
+ {intersection,H1,convert_back([H2|T])};
+convert_back([H]) ->
+ H;
+convert_back([]) ->
+ none.
-check_constraint(S,{'SingleValue', L}) when is_list(L) ->
- F = fun(A) -> resolv_value(S,A) end,
- {'SingleValue',lists:sort(lists:map(F,L))};
-
-check_constraint(S,{'SingleValue', V}) when is_integer(V) ->
- Val = resolv_value(S,V),
-%% [{'SingleValue',Val},{'ValueRange',{Val,Val}}]; % Why adding value range?
- {'SingleValue',Val};
-check_constraint(S,{'SingleValue', V}) ->
- {'SingleValue',resolv_value(S,V)};
-
-check_constraint(S,{'ValueRange', {Lb, Ub}}) ->
- {'ValueRange',{resolv_value(S,Lb),resolv_value(S,Ub)}};
-%% In case of a constraint with extension marks like (1..Ub,...)
-check_constraint(S,{VR={'ValueRange', {_Lb, _Ub}},Rest}) ->
- {check_constraint(S,VR),Rest};
-check_constraint(_S,{'PermittedAlphabet',PA}) ->
- {'PermittedAlphabet',permitted_alphabet_cnstr(PA)};
-
-check_constraint(S,{valueset,Type}) ->
- {valueset,check_type(S,S#state.tname,Type)};
-
-check_constraint(_S,ST={simpletable,Type}) when is_atom(Type) ->
- %% An already checked constraint
- ST;
-check_constraint(S,{simpletable,Type}) ->
+check_simpletable(S, HostType, Type) ->
+ case HostType of
+ #type{def=#'ObjectClassFieldType'{}} ->
+ ok;
+ _ ->
+ %% Table constraints may only be applied to
+ %% CLASS.&field constructs.
+ asn1_error(S, illegal_table_constraint)
+ end,
Def = case Type of
#type{def=D} -> D;
- {'SingleValue',ObjRef = #'Externalvaluereference'{}} ->
- ObjRef
+ {'SingleValue',#'Externalvaluereference'{}=ObjRef} ->
+ ObjRef;
+ _ ->
+ asn1_error(S, invalid_table_constraint)
end,
- C = match_parameters(S,Def,S#state.parameters),
+ C = match_parameter(S, Def),
case C of
#'Externaltypereference'{} ->
- ERef = check_externaltypereference(S,C),
- {simpletable,ERef#'Externaltypereference'.type};
- #type{def=#'Externaltypereference'{}=ExtTypeRef} ->
- ERef = check_externaltypereference(S, ExtTypeRef),
+ ERef = check_externaltypereference(S, C),
{simpletable,ERef#'Externaltypereference'.type};
- {valueset,#type{def=ERef=#'Externaltypereference'{}}} -> % this is an object set
- {_,TDef} = get_referenced_type(S,ERef),
- case TDef#typedef.typespec of
- #'ObjectSet'{} ->
- check_object(S,TDef,TDef#typedef.typespec),
- {simpletable,ERef#'Externaltypereference'.type};
- Err ->
- exit({error,{internal_error,Err}})
- end;
#'Externalvaluereference'{} ->
%% This is an object set with a referenced object
- {_,TorVDef} = get_referenced_type(S,C),
- GetObjectSet =
- fun(#typedef{typespec=O}) when is_record(O,'Object') ->
- #'ObjectSet'{class=O#'Object'.classname,
- set={'SingleValue',C}};
- (#valuedef{type=Cl,value=O})
- when is_record(O,'Externalvaluereference'),
- is_record(Cl,type) ->
- %% an object might reference another object
- #'ObjectSet'{class=Cl#type.def,
- set={'SingleValue',O}};
- (Err) ->
- exit({error,{internal_error,simpletable_constraint,Err}})
- end,
- ObjSet = GetObjectSet(TorVDef),
- {simpletable,check_object(S,Type,ObjSet)};
- #'ObjectSet'{} ->
- io:format("ALERT: simpletable forbidden case!~n",[]),
- {simpletable,check_object(S,Type,C)};
- {'ValueFromObject',{_,ORef},FieldName} ->
- %% This is an ObjectFromObject
- {_,Object} = get_referenced_type(S,ORef),
- ChObject = check_object(S,Object,
- Object#typedef.typespec),
- ObjFromObj=
- get_fieldname_element(S,Object#typedef{
- typespec=ChObject},
- FieldName),
- {simpletable,ObjFromObj};
-%% ObjFromObj#typedef{checked=true,typespec=
-%% check_object(S,ObjFromObj,
-%% ObjFromObj#typedef.typespec)}};
- _ ->
- check_type(S,S#state.tname,Type),%% this seems stupid.
- OSName = Def#'Externaltypereference'.type,
- {simpletable,OSName}
- end;
+ {_,TorVDef} = get_referenced_type(S, C),
+ Set = case TorVDef of
+ #typedef{typespec=#'Object'{classname=ClassName}} ->
+ #'ObjectSet'{class=ClassName,
+ set={'SingleValue',C}};
+ #valuedef{type=#type{def=ClassDef},
+ value=#'Externalvaluereference'{}=Obj} ->
+ %% an object might reference another object
+ #'ObjectSet'{class=ClassDef,
+ set={'SingleValue',Obj}}
+ end,
+ {simpletable,check_object(S, Type, Set)};
+ {'ValueFromObject',{_,Object},FieldNames} ->
+ %% This is an ObjectFromObject.
+ {simpletable,extract_field(S, Object, FieldNames)}
+ end.
-check_constraint(S,{componentrelation,{objectset,Opos,Objset},Id}) ->
+check_componentrelation(S, {objectset,Opos,Objset0}, Id) ->
%% Objset is an 'Externaltypereference' record, since Objset is
%% a DefinedObjectSet.
- RealObjset = match_parameters(S,Objset,S#state.parameters),
- ObjSetRef =
- case RealObjset of
- #'Externaltypereference'{} -> RealObjset;
- #type{def=#'Externaltypereference'{}} -> RealObjset#type.def;
- {valueset,OS = #type{def=#'Externaltypereference'{}}} -> OS#type.def
- end,
- Ext = check_externaltypereference(S,ObjSetRef),
- {componentrelation,{objectset,Opos,Ext},Id};
+ ObjSet = match_parameter(S, Objset0),
+ Ext = check_externaltypereference(S, ObjSet),
+ {componentrelation,{objectset,Opos,Ext},Id}.
+
+%%%
+%%% Internal set representation.
+%%%
+%%% We represent sets as a union of strictly disjoint ranges:
+%%%
+%%% {set,[Range]}
+%%%
+%%% A range is represented as:
+%%%
+%%% Range = {a_range,UpperBound} | {range,LowerBound,UpperBound}
+%%%
+%%% We don't use the atom 'MIN' to represent MIN, because atoms
+%%% compare higher than integers. Instead we use {a_range,UpperBound}
+%%% to represent MIN..UpperBound. We represent MAX as 'MAX' because
+%%% 'MAX' compares higher than any integer.
+%%%
+%%% The ranges are sorted in term order. The ranges must not overlap
+%%% or be adjacent to each other. This invariant is established when
+%%% creating sets, and maintained by the intersection and union
+%%% operators.
+%%%
+%%% Examples of invalid set representations:
+%%%
+%%% [{range,0,10},{range,5,10}] %Overlapping ranges
+%%%   [{range,0,5},{range,6,10}]     %Adjacent ranges
+%%% [{range,10,20},{a_range,100}] %Not sorted
+%%%
+
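+%% Minimal sketch (hypothetical helper) of how the representation above
+%% can be read: a membership test over {set,[Range]}.  It relies on
+%% 'MAX' comparing greater than any integer in Erlang term order.  For
+%% example, set_member_example(15, {set,[{a_range,10},{range,12,20}]})
+%% returns true (the set denotes MIN..10 union 12..20).
+set_member_example(V, {set,Ranges}) when is_integer(V) ->
+    lists:any(fun({a_range,Ub}) -> V =< Ub;
+                 ({range,Lb,Ub}) -> Lb =< V andalso V =< Ub
+              end, Ranges).
+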
+make_constr_set(_, 'MIN', Ub) ->
+ {set,[{a_range,make_constr_set_val(Ub)}]};
+make_constr_set(_, Lb, Ub) when Lb =< Ub ->
+ {set,[{range,make_constr_set_val(Lb),
+ make_constr_set_val(Ub)}]};
+make_constr_set(S, _, _) ->
+ asn1_error(S, reversed_range).
+
+make_constr_set_val([C]) when is_integer(C) -> C;
+make_constr_set_val(Val) -> Val.
+
+make_constr_set_vs(Vs) ->
+ {set,make_constr_set_vs_1(Vs)}.
+
+make_constr_set_vs_1([]) ->
+ [];
+make_constr_set_vs_1([V]) ->
+ [{range,V,V}];
+make_constr_set_vs_1([V0|Vs]) ->
+ V1 = make_constr_set_vs_1(Vs),
+ range_union([{range,V0,V0}], V1).
+
+%%%
+%%% Set operators.
+%%%
+
+cs_intersection(_S, Other, none) ->
+ Other;
+cs_intersection(_S, none, Other) ->
+ Other;
+cs_intersection(_S, {set,SetA}, {set,SetB}) ->
+ {set,range_intersection(SetA, SetB)};
+cs_intersection(_S, A, B) ->
+ {intersection,A,B}.
+
+range_intersection([], []) ->
+ [];
+range_intersection([_|_], []) ->
+ [];
+range_intersection([], [_|_]) ->
+ [];
+range_intersection([H1|_]=A, [H2|_]=B) when H1 > H2 ->
+ range_intersection(B, A);
+range_intersection([H1|T1], [H2|T2]=B) ->
+ %% Now H1 =< H2.
+ case {H1,H2} of
+ {{a_range,Ub0},{a_range,Ub1}} when Ub0 < Ub1 ->
+ %% Ub0 =/= 'MAX'
+ [H1|range_intersection(T1, [{range,Ub0+1,Ub1}|T2])];
+ {{a_range,_},{a_range,_}} ->
+ %% Must be equal.
+ [H1|range_intersection(T1, T2)];
+ {{a_range,Ub0},{range,Lb1,_Ub1}} when Ub0 < Lb1 ->
+ %% No intersection.
+ range_intersection(T1, B);
+ {{a_range,Ub0},{range,Lb1,Ub1}} when Ub0 < Ub1 ->
+ %% Ub0 =/= 'MAX'
+ [{range,Lb1,Ub0}|range_intersection(T1, [{range,Ub0+1,Ub1}|T2])];
+ {{a_range,Ub},{range,_Lb1,Ub}} ->
+ %% The first range covers the second range, but does not
+ %% go beyond. We handle this case specially because Ub may
+ %% be 'MAX', and evaluating 'MAX'+1 will fail.
+ [H2|range_intersection(T1, T2)];
+ {{a_range,Ub0},{range,_Lb1,Ub1}} ->
+ %% Ub0 > Ub1, Ub1 =/= 'MAX'. The first range completely
+ %% covers and extends beyond the second range.
+ [H2|range_intersection([{range,Ub1+1,Ub0}|T1], T2)];
+ {{range,_Lb0,Ub0},{range,Lb1,_Ub1}} when Ub0 < Lb1 ->
+ %% Lb0 < Lb1. No intersection.
+ range_intersection(T1, B);
+ {{range,_Lb0,Ub0},{range,Lb1,Ub1}} when Ub0 < Ub1 ->
+ %% Ub0 >= Lb1, Ub0 =/= 'MAX'. Partial overlap.
+ [{range,Lb1,Ub0}|range_intersection(T1, [{range,Ub0+1,Ub1}|T2])];
+ {{range,_Lb0,Ub},{range,_Lb1,Ub}} ->
+ %% The first range covers the second range, but does not
+ %% go beyond. We handle this case specially because Ub may
+ %% be 'MAX', and evaluating 'MAX'+1 will fail.
+ [H2|range_intersection(T1, T2)];
+ {{range,_Lb0,Ub0},{range,_Lb1,Ub1}} ->
+ %% Ub1 =/= MAX. The first range completely covers and
+ %% extends beyond the second.
+ [H2|range_intersection([{range,Ub1+1,Ub0}|T1], T2)]
+ end.
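+
+%% Illustrative sketch (hypothetical helper): MIN..10 intersected with
+%% 5..20 leaves 5..10, and disjoint ranges intersect to the empty set.
+range_intersection_example() ->
+    [{range,5,10}] = range_intersection([{a_range,10}], [{range,5,20}]),
+    [] = range_intersection([{range,1,4}], [{range,6,9}]),
+    ok.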
-check_constraint(S,Type) when is_record(Type,type) ->
- #type{def=Def} = check_type(S,S#state.tname,Type),
- Def;
+cs_union(_S, {set,SetA}, {set,SetB}) ->
+ {set,range_union(SetA, SetB)};
+cs_union(_S, A, B) ->
+ {union,A,B}.
+
+range_union(A, B) ->
+ range_union_1(lists:merge(A, B)).
+
+range_union_1([{a_range,Ub0},{a_range,Ub1}|T]) ->
+ range_union_1([{a_range,max(Ub0, Ub1)}|T]);
+range_union_1([{a_range,Ub0},{range,Lb1,Ub1}|T]) when Lb1-1 =< Ub0 ->
+ range_union_1([{a_range,max(Ub0, Ub1)}|T]);
+range_union_1([{a_range,_}=H|T]) ->
+ %% Ranges are disjoint.
+ [H|range_union_1(T)];
+range_union_1([{range,Lb0,Ub0},{range,Lb1,Ub1}|T]) when Lb1-1 =< Ub0 ->
+ range_union_1([{range,Lb0,max(Ub0, Ub1)}|T]);
+range_union_1([{range,_,_}=H|T]) ->
+ %% Ranges are disjoint.
+ [H|range_union_1(T)];
+range_union_1([]) ->
+ [].
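+
+%% Illustrative sketch (hypothetical helper): overlapping or adjacent
+%% ranges are coalesced, so the result keeps the invariant of strictly
+%% disjoint, non-adjacent ranges.
+range_union_example() ->
+    [{a_range,20}] = range_union([{a_range,10}], [{range,11,20}]),
+    [{range,1,10}] = range_union([{range,1,5}], [{range,6,10}]),
+    ok.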
-check_constraint(S,C) when is_list(C) ->
- lists:map(fun(X)->check_constraint(S,X) end,C);
-% else keep the constraint unchanged
-check_constraint(_S,Any) ->
-% io:format("Constraint = ~p~n",[Any]),
- Any.
-
-permitted_alphabet_cnstr(T) when is_tuple(T) ->
- permitted_alphabet_cnstr([T]);
-permitted_alphabet_cnstr(L) when is_list(L) ->
- VRexpand = fun({'ValueRange',{A,B}}) ->
- {'SingleValue',expand_valuerange(A,B)};
- (Other) ->
- Other
- end,
- L2 = lists:map(VRexpand,L),
- %% first perform intersection
- L3 = permitted_alphabet_intersection(L2),
- [Res] = permitted_alphabet_union(L3),
- Res.
+%%%
+%%% Finish up constraints, making them suitable for the back-ends.
+%%%
+%%% A 'PermittedAlphabet' (FROM) constraint will be reduced to:
+%%%
+%%% {'SingleValue',[integer()]}
+%%%
+%%% A 'SizeConstraint' (SIZE) constraint will be reduced to:
+%%%
+%%% {Lb,Ub}
+%%%
+%%% All other constraints will be reduced to:
+%%%
+%%%    {'SingleValue',[integer()]} | {'ValueRange',{Lb,Ub}}
+%%%
+
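+%% Minimal sketch (hypothetical helper) of the reductions described
+%% above, in terms of the collapse functions defined further down.
+collapse_example() ->
+    {1,10} = size_constraint_collapse([{range,1,10}]),
+    {'SingleValue',[97,98,99]} = single_value_collapse([{range,$a,$c}]),
+    {'ValueRange',{'MIN',5}} = smart_collapse([{a_range,5}]),
+    ok.
+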
+finish_constraints(Cs) ->
+ finish_constraints_1(Cs, fun smart_collapse/1).
+
+finish_constraints_1([{element_set,{Tag,{element_set,_,_}=Set0},none}|T],
+ Collapse0) ->
+ Collapse = collapse_fun(Tag),
+ case finish_constraints_1([Set0], Collapse) of
+ [] ->
+ finish_constraints_1(T, Collapse0);
+ [Set] ->
+ [{Tag,Set}|finish_constraints_1(T, Collapse0)]
+ end;
+finish_constraints_1([{element_set,{set,[{a_range,'MAX'}]},_}|T], Collapse) ->
+ finish_constraints_1(T, Collapse);
+finish_constraints_1([{element_set,{intersection,A0,B0},none}|T], Collapse) ->
+ A = {element_set,A0,none},
+ B = {element_set,B0,none},
+ finish_constraints_1([A,B|T], Collapse);
+finish_constraints_1([{element_set,Root,Ext}|T], Collapse) ->
+ case finish_constraint(Root, Ext, Collapse) of
+ none ->
+ finish_constraints_1(T, Collapse);
+ Constr ->
+ [Constr|finish_constraints_1(T, Collapse)]
+ end;
+finish_constraints_1([H|T], Collapse) ->
+ [H|finish_constraints_1(T, Collapse)];
+finish_constraints_1([], _) ->
+ [].
-expand_valuerange([A],[A]) ->
- [A];
-expand_valuerange([A],[B]) when A < B ->
- [A|expand_valuerange([A+1],[B])].
+finish_constraint({set,Root0}, Ext, Collapse) ->
+ case Collapse(Root0) of
+ none -> none;
+ Root -> finish_constraint(Root, Ext, Collapse)
+ end;
+finish_constraint(Root, Ext, _Collapse) ->
+ case Ext of
+ none -> Root;
+ _ -> {Root,[]}
+ end.
-permitted_alphabet_intersection(C) ->
- permitted_alphabet_merge(C,intersection, []).
+collapse_fun('SizeConstraint') ->
+ fun size_constraint_collapse/1;
+collapse_fun('PermittedAlphabet') ->
+ fun single_value_collapse/1.
-permitted_alphabet_union(C) ->
- permitted_alphabet_merge(C,union, []).
+single_value_collapse(V) ->
+ {'SingleValue',ordsets:from_list(single_value_collapse_1(V))}.
-permitted_alphabet_merge([],_,Acc) ->
- lists:reverse(Acc);
-permitted_alphabet_merge([{'SingleValue',L1},
- UorI,
- {'SingleValue',L2}|Rest],UorI,Acc)
- when is_list(L1),is_list(L2) ->
- UI = ordsets:UorI([ordsets:from_list(L1),ordsets:from_list(L2)]),
- permitted_alphabet_merge([{'SingleValue',UI}|Rest],UorI,Acc);
-permitted_alphabet_merge([C1|Rest],UorI,Acc) ->
- permitted_alphabet_merge(Rest,UorI,[C1|Acc]).
-
-
-%% constraint_merge/2
-%% Compute the intersection of the outermost level of the constraint list.
-%% See Dubuisson second paragraph and fotnote on page 285.
-%% If constraints with extension are included in combined constraints. The
-%% resulting combination will have the extension of the last constraint. Thus,
-%% there will be no extension if the last constraint is without extension.
-%% The rootset of all constraints are considered in the "outermoust
-%% intersection". See section 13.1.2 in Dubuisson.
-constraint_merge(St, Cs0) ->
- Cs = constraint_merge_1(St, Cs0),
- normalize_cs(Cs).
-
-normalize_cs([{'SingleValue',[V]}|Cs]) ->
- [{'SingleValue',V}|normalize_cs(Cs)];
-normalize_cs([{'SingleValue',[_|_]=L0}|Cs]) ->
- [H|T] = L = lists:usort(L0),
- [case is_range(H, T) of
- false -> {'SingleValue',L};
- true -> {'ValueRange',{H,lists:last(T)}}
- end|normalize_cs(Cs)];
-normalize_cs([{'ValueRange',{Sv,Sv}}|Cs]) ->
- [{'SingleValue',Sv}|normalize_cs(Cs)];
-normalize_cs([{'ValueRange',{'MIN','MAX'}}|Cs]) ->
- normalize_cs(Cs);
-normalize_cs([{'SizeConstraint',C0}|Cs]) ->
- case normalize_size_constraint(C0) of
- none ->
- normalize_cs(Cs);
- C ->
- [{'SizeConstraint',C}|normalize_cs(Cs)]
- end;
-normalize_cs([H|T]) ->
- [H|normalize_cs(T)];
-normalize_cs([]) -> [].
+single_value_collapse_1([{range,Lb,Ub}|T]) when is_integer(Lb),
+ is_integer(Ub) ->
+ lists:seq(Lb, Ub) ++ single_value_collapse_1(T);
+single_value_collapse_1([]) ->
+ [].
-%% Normalize a size constraint to make it non-ambiguous and
-%% easy to interpret for the backends.
-%%
-%% Returns one of the following terms:
-%% {LowerBound,UpperBound}
-%% {{LowerBound,UpperBound},[]} % Extensible
-%% none % Remove size constraint from list
-%%
-%% where:
-%% LowerBound = integer()
-%% UpperBound = integer() | 'MAX'
-
-normalize_size_constraint(Sv) when is_integer(Sv) ->
- {Sv,Sv};
-normalize_size_constraint({Root,Ext}) when is_list(Ext) ->
- {normalize_size_constraint(Root),[]};
-normalize_size_constraint({{_,_},Ext}) when is_integer(Ext) ->
- normalize_size_constraint(Ext);
-normalize_size_constraint([H|T]) ->
- {H,lists:last(T)};
-normalize_size_constraint({0,'MAX'}) ->
+smart_collapse([{a_range,Ub}]) ->
+ {'ValueRange',{'MIN',Ub}};
+smart_collapse([{a_range,_}|T]) ->
+ {range,_,Ub} = lists:last(T),
+ {'ValueRange',{'MIN',Ub}};
+smart_collapse([{range,Lb,Ub}]) ->
+ {'ValueRange',{Lb,Ub}};
+smart_collapse([_|_]=L) ->
+ V = lists:foldr(fun({range,Lb,Ub}, A) ->
+ seq(Lb, Ub) ++ A
+ end, [], L),
+ {'SingleValue',V}.
+
+size_constraint_collapse([{range,0,'MAX'}]) ->
none;
-normalize_size_constraint({Lb,Ub}=Range)
- when is_integer(Lb), is_integer(Ub) orelse Ub =:= 'MAX' ->
- Range.
+size_constraint_collapse(Root) ->
+ [{range,Lb,_}|_] = Root,
+ {range,_,Ub} = lists:last(Root),
+ {Lb,Ub}.
-is_range(Prev, [H|T]) when Prev =:= H - 1 -> is_range(H, T);
-is_range(_, [_|_]) -> false;
-is_range(_, []) -> true.
+seq(Same, Same) ->
+ [Same];
+seq(Lb, Ub) when is_integer(Lb), is_integer(Ub) ->
+ lists:seq(Lb, Ub).
-constraint_merge_1(_S, [H]=C) when is_tuple(H) ->
- C;
-constraint_merge_1(_S, []) ->
- [];
-constraint_merge_1(S, C) ->
- %% skip all extension but the last extension
- C1 = filter_extensions(C),
- %% perform all internal level intersections, intersections first
- %% since they have precedence over unions
- C2 = lists:map(fun(X)when is_list(X)->constraint_intersection(S,X);
- (X) -> X end,
- C1),
- %% perform all internal level unions
- C3 = lists:map(fun(X)when is_list(X)->constraint_union(S,X);
- (X) -> X end,
- C2),
-
- %% now get intersection of the outermost level
- %% get the least common single value constraint
- SVs = get_constraints(C3,'SingleValue'),
- CombSV = intersection_of_sv(S,SVs),
- %% get the least common value range constraint
- VRs = get_constraints(C3,'ValueRange'),
- CombVR = intersection_of_vr(S,VRs),
- %% get the least common size constraint
- SZs = get_constraints(C3,'SizeConstraint'),
- CombSZ = intersection_of_size(S,SZs),
- RestC = ordsets:subtract(ordsets:from_list(C3),
- ordsets:from_list(SZs ++ VRs ++ SVs)),
- %% get the least common combined constraint. That is the union of each
- %% deep constraint and merge of single value and value range constraints.
- %% FIXME: Removing 'intersection' from the flattened list essentially
- %% means that intersections are converted to unions!
- Cs = combine_constraints(S, CombSV, CombVR, CombSZ++RestC),
- [X || X <- lists:flatten(Cs),
- X =/= intersection,
- X =/= union].
-
-%% constraint_union(S,C) takes a list of constraints as input and
-%% merge them to a union. Unions are performed when two
-%% constraints is found with an atom union between.
-%% The list may be nested. Fix that later !!!
-constraint_union(_S,[]) ->
- [];
-constraint_union(_S,C=[_E]) ->
- C;
-constraint_union(S,C) when is_list(C) ->
- case lists:member(union,C) of
- true ->
- constraint_union1(S,C,[]);
- _ ->
- C
- end;
-% SV = get_constraints(C,'SingleValue'),
-% SV1 = constraint_union_sv(S,SV),
-% VR = get_constraints(C,'ValueRange'),
-% VR1 = constraint_union_vr(VR),
-% RestC = ordsets:filter(fun({'SingleValue',_})->false;
-% ({'ValueRange',_})->false;
-% (_) -> true end,ordsets:from_list(C)),
-% SV1++VR1++RestC;
-constraint_union(_S,C) ->
- [C].
-
-constraint_union1(S, [{'ValueRange',{Lb1,Ub1}},union,
- {'ValueRange',{Lb2,Ub2}}|Rest], Acc) ->
- AunionB = {'ValueRange',{c_min(Lb1, Lb2),max(Ub1, Ub2)}},
- constraint_union1(S, [AunionB|Rest], Acc);
-constraint_union1(S,[A={'SingleValue',_},union,B={'SingleValue',_}|Rest],Acc) ->
- AunionB = constraint_union_sv(S,[A,B]),
- constraint_union1(S,Rest,Acc ++ AunionB);
-constraint_union1(S,[A={'SingleValue',_},union,B={'ValueRange',_}|Rest],Acc) ->
- AunionB = union_sv_vr(S,A,B),
- constraint_union1(S, AunionB++Rest, Acc);
-constraint_union1(S,[A={'ValueRange',_},union,B={'SingleValue',_}|Rest],Acc) ->
- AunionB = union_sv_vr(S,B,A),
- constraint_union1(S, AunionB++Rest, Acc);
-constraint_union1(S,[union|Rest],Acc) -> %skip when unsupported constraints
- constraint_union1(S,Rest,Acc);
-constraint_union1(S,[A|Rest],Acc) ->
- constraint_union1(S,Rest,[A|Acc]);
-constraint_union1(_S,[],Acc) ->
- Acc.
+%%%-----------------------------------------
+%% If the constraint value is a defined value, the value name
+%% is replaced by the actual value.
+%%
+resolve_value(S, HostType, Val) ->
+ Id = match_parameter(S, Val),
+ resolve_value1(S, HostType, Id).
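+
+%% Illustrative sketch (hypothetical helper, given a checker state S):
+%% exclusive bounds such as (4<..<10) arrive here as {gt,4} and
+%% {lt,10} and resolve to the plain integers 5 and 9.
+resolve_open_bounds_example(S, HostType) ->
+    5 = resolve_value1(S, HostType, {gt,4}),
+    9 = resolve_value1(S, HostType, {lt,10}),
+    ok.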
-constraint_union_sv(_S,SV) ->
- Values=lists:map(fun({_,V})->V end,SV),
- case ordsets:from_list(Values) of
- [] -> [];
- [N] -> [{'SingleValue',N}];
- L -> [{'SingleValue',L}]
- end.
-c_min('MIN', _) -> 'MIN';
-c_min(_, 'MIN') -> 'MIN';
-c_min(A, B) -> min(A, B).
-
-union_sv_vr(_S,{'SingleValue',SV},VR)
- when is_integer(SV) ->
- union_sv_vr(_S,{'SingleValue',[SV]},VR);
-union_sv_vr(_S,{'SingleValue',SV},{'ValueRange',{VLb,VUb}})
- when is_list(SV) ->
- L = lists:sort(SV++[VLb,VUb]),
- {Lb,L1} = case lists:member('MIN',L) of
- true -> {'MIN',L--['MIN']}; % remove 'MIN' so it does not disturb
- false -> {hd(L),tl(L)}
- end,
- Ub = case lists:member('MAX',L1) of
- true -> 'MAX';
- false -> lists:last(L1)
- end,
- case SV of
- [H] -> H;
- _ -> SV
- end,
- %% for now we through away the Singlevalues so that they don't disturb
- %% in the code generating phase (the effective Valuerange is already
- %% calculated. If we want to keep the Singlevalues as well for
- %% use in code gen phases we need to introduce a new representation
- %% like {'ValueRange',{Lb,Ub},[ListOfRanges|AntiValues|Singlevalues]
- %% These could be used to generate guards which allows only the specific
- %% values , not the full range
- [{'ValueRange',{Lb,Ub}}].
-
-
-%% get_constraints/2
-%% Arguments are a list of constraints, which has the format {key,value},
-%% and a constraint type
-%% Returns a list of constraints only of the requested type or the atom
-%% 'no' if no such constraints were found
-get_constraints(L=[{CType,_}],CType) ->
- L;
-get_constraints(C,CType) ->
- keysearch_allwithkey(CType,1,C).
-
-%% keysearch_allwithkey(Key,Ix,L)
-%% Types:
-%% Key = is_atom()
-%% Ix = integer()
-%% L = [TwoTuple]
-%% TwoTuple = [{atom(),term()}|...]
-%% Returns a List that contains all
-%% elements from L that has a key Key as element Ix
-keysearch_allwithkey(Key,Ix,L) ->
- lists:filter(fun(X) when is_tuple(X) ->
- case element(Ix,X) of
- Key -> true;
- _ -> false
- end;
- (_) -> false
- end, L).
-
-
-%% filter_extensions(C)
-%% takes a list of constraints as input and returns a list with the
-%% constraints and all extensions but the last are removed.
-filter_extensions([L]) when is_list(L) ->
- [filter_extensions(L)];
-filter_extensions(C=[_H]) ->
- C;
-filter_extensions(C) when is_list(C) ->
- filter_extensions(C,[], []).
-
-filter_extensions([],Acc,[]) ->
- Acc;
-filter_extensions([],Acc,[EC|ExtAcc]) ->
- CwoExt = remove_extension(ExtAcc,[]),
- CwoExt ++ [EC|Acc];
-filter_extensions([C={A,_E}|T],Acc,ExtAcc) when is_tuple(A) ->
- filter_extensions(T,Acc,[C|ExtAcc]);
-filter_extensions([C={'SizeConstraint',{A,_B}}|T],Acc,ExtAcc)
- when is_list(A);is_tuple(A) ->
- filter_extensions(T,Acc,[C|ExtAcc]);
-filter_extensions([C={'PermittedAlphabet',{{'SingleValue',_},E}}|T],Acc,ExtAcc)
- when is_tuple(E); is_list(E) ->
- filter_extensions(T,Acc,[C|ExtAcc]);
-filter_extensions([H|T],Acc,ExtAcc) ->
- filter_extensions(T,[H|Acc],ExtAcc).
-
-remove_extension([],Acc) ->
- Acc;
-remove_extension([{'SizeConstraint',{A,_B}}|R],Acc) ->
- remove_extension(R,[{'SizeConstraint',A}|Acc]);
-remove_extension([{C,_E}|R],Acc) when is_tuple(C) ->
- remove_extension(R,[C|Acc]);
-remove_extension([{'PermittedAlphabet',{A={'SingleValue',_},
- E}}|R],Acc)
- when is_tuple(E);is_list(E) ->
- remove_extension(R,[{'PermittedAlphabet',A}|Acc]).
-
-%% constraint_intersection(S,C) takes a list of constraints as input and
-%% performs intersections. Intersecions are performed when an
-%% atom intersection is found between two constraints.
-%% The list may be nested. Fix that later !!!
-constraint_intersection(_S,[]) ->
- [];
-constraint_intersection(_S,C=[_E]) ->
- C;
-constraint_intersection(S,C) when is_list(C) ->
-% io:format("constraint_intersection: ~p~n",[C]),
- case lists:member(intersection,C) of
- true ->
- constraint_intersection1(S,C,[]);
- _ ->
- C
+resolve_value1(S, HostType, #'Externalvaluereference'{value=Name}=ERef) ->
+ case resolve_namednumber(S, HostType, Name) of
+ V when is_integer(V) ->
+ V;
+ not_named ->
+ resolve_value1(S, HostType, get_referenced_value(S, ERef))
end;
-constraint_intersection(_S,C) ->
- [C].
-
-constraint_intersection1(S,[A,intersection,B|Rest],Acc) ->
- AisecB = c_intersect(S,A,B),
- constraint_intersection1(S, AisecB++Rest, Acc);
-constraint_intersection1(S,[A|Rest],Acc) ->
- constraint_intersection1(S,Rest,[A|Acc]);
-constraint_intersection1(_, [], [C]) ->
- C;
-constraint_intersection1(_,[],Acc) ->
- lists:reverse(Acc).
-
-c_intersect(S,C1={'SingleValue',_},C2={'SingleValue',_}) ->
- intersection_of_sv(S,[C1,C2]);
-c_intersect(S,C1={'ValueRange',_},C2={'ValueRange',_}) ->
- intersection_of_vr(S,[C1,C2]);
-c_intersect(S,C1={'ValueRange',_},C2={'SingleValue',_}) ->
- intersection_sv_vr(S,[C2],[C1]);
-c_intersect(S,C1={'SingleValue',_},C2={'ValueRange',_}) ->
- intersection_sv_vr(S,[C1],[C2]);
-c_intersect(_S,C1,C2) ->
- [C1,C2].
-
-%% combine_constraints(S,SV,VR,CComb)
-%% Types:
-%% S = is_record(state,S)
-%% SV = [] | [SVC]
-%% VR = [] | [VRC]
-%% CComb = [] | [Lists]
-%% SVC = {'SingleValue',integer()} | {'SingleValue',[integer(),...]}
-%% VRC = {'ValueRange',{Lb,Ub}}
-%% Lists = List of lists containing any constraint combination
-%% Lb = 'MIN' | integer()
-%% Ub = 'MAX' | integer()
-%% Returns a combination of the least common constraint among SV,VR and all
-%% elements in CComb
-combine_constraints(_S,[],VR,CComb) ->
- VR ++ CComb;
-% combine_combined_cnstr(S,VR,CComb);
-combine_constraints(_S,SV,[],CComb) ->
- SV ++ CComb;
-% combine_combined_cnstr(S,SV,CComb);
-combine_constraints(S,SV,VR,CComb) ->
- C=intersection_sv_vr(S,SV,VR),
- C ++ CComb.
-% combine_combined_cnstr(S,C,CComb).
-
-intersection_sv_vr(_S,[C1={'SingleValue',SV}],[C2={'ValueRange',{_Lb,_Ub}}])
- when is_integer(SV) ->
- case is_int_in_vr(SV,C2) of
- true -> [C1];
- _ -> %%error({type,{"asn1 illegal constraint",C1,C2},S})
- %throw({error,{"asn1 illegal constraint",C1,C2}})
- %io:format("warning: could not analyze constraint ~p~n",[[C1,C2]]),
- [C1,C2]
+resolve_value1(S, HostType, {gt,V}) ->
+ case resolve_value1(S, HostType, V) of
+ Int when is_integer(Int) ->
+ Int + 1;
+ _Other ->
+ asn1_error(S, illegal_integer_value)
end;
-intersection_sv_vr(_S,[C1={'SingleValue',SV}],[C2])
- when is_list(SV) ->
- case lists:filter(fun(X)->is_int_in_vr(X,C2) end,SV) of
- [] ->
- %%error({type,{"asn1 illegal constraint",C1,C2},S});
- %throw({error,{"asn1 illegal constraint",C1,C2}});
- %io:format("warning: could not analyze constraint ~p~n",[[C1,C2]]),
- [C1,C2];
- [V] -> [{'SingleValue',V}];
- L -> [{'SingleValue',L}]
- end.
-
-
-%% Size constraint [{'SizeConstraint',1},{'SizeConstraint',{{1,64},[]}}]
-
-intersection_of_size(_,[]) ->
- [];
-intersection_of_size(_,C=[_SZ]) ->
- C;
-intersection_of_size(S,[SZ,SZ|Rest]) ->
- intersection_of_size(S,[SZ|Rest]);
-intersection_of_size(S,C=[C1={_,Int},{_,Range}|Rest])
- when is_integer(Int),is_tuple(Range) ->
- case Range of
- {Lb,Ub} when Int >= Lb,
- Int =< Ub ->
- intersection_of_size(S,[C1|Rest]);
- {{Lb,Ub},Ext} when is_list(Ext),Int >= Lb,Int =< Ub ->
- intersection_of_size(S,[C1|Rest]);
- _ ->
- throw({error,{asn1,{illegal_size_constraint,C}}})
+resolve_value1(S, HostType, {lt,V}) ->
+ case resolve_value1(S, HostType, V) of
+ Int when is_integer(Int) ->
+ Int - 1;
+ _Other ->
+ asn1_error(S, illegal_integer_value)
end;
-intersection_of_size(S,[C1={_,Range},C2={_,Int}|Rest])
- when is_integer(Int),is_tuple(Range) ->
- intersection_of_size(S,[C2,C1|Rest]);
-intersection_of_size(S,[{_,{Lb1,Ub1}},{_,{Lb2,Ub2}}|Rest]) ->
- Lb=greatest_LB(ordsets:from_list([Lb1,Lb2])),
- Ub=smallest_UB(ordsets:from_list([Ub1,Ub2])),
- intersection_of_size(S,[{'SizeConstraint',{Lb,Ub}}|Rest]);
-intersection_of_size(_,SZ) ->
- throw({error,{asn1,{illegal_size_constraint,SZ}}}).
-
-intersection_of_vr(_,[]) ->
- [];
-intersection_of_vr(_,VR=[_C]) ->
- VR;
-intersection_of_vr(S,[{_,{Lb1,Ub1}},{_,{Lb2,Ub2}}|Rest]) ->
- Lb=greatest_LB(ordsets:from_list([Lb1,Lb2])),
- Ub=smallest_UB(ordsets:from_list([Ub1,Ub2])),
- intersection_of_vr(S,[{'ValueRange',{Lb,Ub}}|Rest]);
-intersection_of_vr(_S,VR) ->
- %%error({type,{asn1,{illegal_value_range_constraint,VR}},S});
- throw({error,{asn1,{illegal_value_range_constraint,VR}}}).
-
-intersection_of_sv(_,[]) ->
- [];
-intersection_of_sv(_,SV=[_C]) ->
- SV;
-intersection_of_sv(S,[SV,SV|Rest]) ->
- intersection_of_sv(S,[SV|Rest]);
-intersection_of_sv(S,[{_,Int},{_,SV}|Rest]) when is_integer(Int),
- is_list(SV) ->
- SV2=intersection_of_sv1(S,Int,SV),
- intersection_of_sv(S,[SV2|Rest]);
-intersection_of_sv(S,[{_,SV},{_,Int}|Rest]) when is_integer(Int),
- is_list(SV) ->
- SV2=intersection_of_sv1(S,Int,SV),
- intersection_of_sv(S,[SV2|Rest]);
-intersection_of_sv(S,[{_,SV1},{_,SV2}|Rest]) when is_list(SV1),
- is_list(SV2) ->
- SV3=common_set(SV1,SV2),
- intersection_of_sv(S,[SV3|Rest]);
-intersection_of_sv(_S,SV) ->
- %%error({type,{asn1,{illegal_single_value_constraint,SV}},S}).
- throw({error,{asn1,{illegal_single_value_constraint,SV}}}).
-
-intersection_of_sv1(_S,Int,SV) when is_integer(Int),is_list(SV) ->
- case lists:member(Int,SV) of
- true -> {'SingleValue',Int};
+resolve_value1(S, _HostType, {'ValueFromObject',{object,Object},FieldName}) ->
+ get_value_from_object(S, Object, FieldName);
+resolve_value1(_, _, #valuedef{checked=true,value=V}) ->
+ V;
+resolve_value1(S, _, #valuedef{value={'ValueFromObject',
+ {object,Object},FieldName}}) ->
+ get_value_from_object(S, Object, FieldName);
+resolve_value1(S, _HostType, #valuedef{}=VDef) ->
+ #valuedef{value=Val} = check_value(S,VDef),
+ Val;
+resolve_value1(_, _, V) ->
+ V.
+
+resolve_namednumber(S, #type{def=Def}, Name) ->
+ case Def of
+ {'ENUMERATED',NameList} ->
+ resolve_namednumber_1(S, Name, NameList);
+ {'INTEGER',NameList} ->
+ resolve_namednumber_1(S, Name, NameList);
_ ->
- %%error({type,{asn1,{illegal_single_value_constraint,Int,SV}},S})
- throw({error,{asn1,{illegal_single_value_constraint,Int,SV}}})
- end;
-intersection_of_sv1(_S,SV1,SV2) ->
- %%error({type,{asn1,{illegal_single_value_constraint,SV1,SV2}},S}).
- throw({error,{asn1,{illegal_single_value_constraint,SV1,SV2}}}).
+ not_named
+ end.
-greatest_LB([H]) ->
- H;
-greatest_LB(L) ->
- greatest_LB1(lists:reverse(L)).
-greatest_LB1(['MIN',H2|_T])->
- H2;
-greatest_LB1([H|_T]) ->
- H.
-smallest_UB(L) ->
- hd(L).
-
-common_set(SV1,SV2) ->
- lists:filter(fun(X)->lists:member(X,SV1) end,SV2).
-
-is_int_in_vr(Int,{_,{'MIN','MAX'}}) when is_integer(Int) ->
- true;
-is_int_in_vr(Int,{_,{'MIN',Ub}}) when is_integer(Int),Int =< Ub ->
- true;
-is_int_in_vr(Int,{_,{Lb,'MAX'}}) when is_integer(Int),Int >= Lb ->
- true;
-is_int_in_vr(Int,{_,{Lb,Ub}}) when is_integer(Int),Int >= Lb,Int =< Ub ->
- true;
-is_int_in_vr(_,_) ->
- false.
-
+resolve_namednumber_1(S, Name, NameList) ->
+ try
+ NamedNumberList = check_enumerated(S, NameList),
+ {_,N} = lookup_enum_value(S, Name, NamedNumberList),
+ N
+ catch _:_ ->
+ not_named
+ end.
+
+%%%
+%%% End of constraint handling.
+%%%
check_imported(S,Imodule,Name) ->
check_imported(S,Imodule,Name,false).
@@ -4510,18 +3757,28 @@ check_reference(S,#'Externaltypereference'{pos=Pos,module=Emod,type=Name}) ->
#'Externaltypereference'{pos=Pos,module=ModName,type=Name}
end.
+get_referenced_value(S, T) ->
+ case get_referenced_type(S, T) of
+ {ExtMod,#valuedef{value=#'Externalvaluereference'{}=Ref}} ->
+ get_referenced_value(update_state(S, ExtMod), Ref);
+ {_,#valuedef{value=Val}} ->
+ Val
+ end.
+
get_referenced_type(S, T) ->
+ get_referenced_type(S, T, false).
+
+get_referenced_type(S, T, Recurse) ->
case do_get_referenced_type(S, T) of
- {_,#type{def=#'Externaltypereference'{}=ERef}} ->
- get_referenced_type(S, ERef);
- {_,#type{def=#'Externalvaluereference'{}=VRef}} ->
- get_referenced_type(S, VRef);
+ {_,#typedef{typespec=#type{def=#'Externaltypereference'{}=ERef}}}
+ when Recurse ->
+ get_referenced_type(S, ERef, Recurse);
{_,_}=Res ->
Res
end.
-do_get_referenced_type(#state{parameters=Ps}=S, T0) ->
- case match_parameters(S, T0, Ps) of
+do_get_referenced_type(S, T0) ->
+ case match_parameter(S, T0) of
T0 ->
do_get_ref_type_1(S, T0);
T ->
@@ -4563,7 +3820,7 @@ get_referenced(S,Emod,Ename,Pos) ->
%% May be an imported entity in module Emod or Emod may not exist
case asn1_db:dbget(Emod,'MODULE') of
undefined ->
- throw({error,{asn1,{module_not_found,Emod}}});
+ asn1_error(S, {undefined_import, Ename, Emod});
_ ->
NewS = update_state(S,Emod),
get_imported(NewS,Ename,Emod,Pos)
@@ -4593,12 +3850,11 @@ get_imported(S,Name,Module,Pos) ->
parse_and_save(S,Imodule),
case asn1_db:dbget(Imodule,'MODULE') of
undefined ->
- throw({error,{asn1,{module_not_found,Imodule}}});
+ asn1_error(S, {undefined_import, Name, Module});
Im when is_record(Im,module) ->
case is_exported(Im,Name) of
false ->
- throw({error,
- {asn1,{not_exported,{Im,Name}}}});
+ asn1_error(S, {undefined_export, Name});
_ ->
?dbg("get_imported, is_exported ~p, ~p~n",[Imodule,Name]),
get_referenced_type(S,
@@ -4611,37 +3867,6 @@ get_imported(S,Name,Module,Pos) ->
get_renamed_reference(S,Name,Module)
end.
-check_and_save(S,#'Externaltypereference'{module=M}=ERef,#typedef{checked=false}=TDef,Settings)
- when S#state.mname /= M ->
- %% This ERef is an imported type (or maybe a set.asn compilation)
- NewS = S#state{mname=M,module=load_asn1_module(S,M),
- type=TDef,tname=get_datastr_name(TDef)},
- Type=check_type(NewS,TDef,TDef#typedef.typespec),%XXX
- CheckedTDef = TDef#typedef{checked=true,
- typespec=Type},
- asn1_db:dbput(M,get_datastr_name(TDef),CheckedTDef),
- {merged_name(S,ERef),Settings};
-check_and_save(S,#'Externaltypereference'{module=M,type=N}=Eref,
- #ptypedef{name=Name,args=Params} = PTDef,Settings) ->
- %% instantiate a parameterized type
- %% The parameterized type should be saved as a type in the module
- %% it was instantiated.
- NewS = S#state{mname=M,module=load_asn1_module(S,M),
- type=PTDef,tname=Name},
- {Args,RestSettings} = lists:split(length(Params),Settings),
- Type = check_type(NewS,PTDef,#type{def={pt,Eref,Args}}),
- ERefName = new_reference_name(N),
- ERefNew = #'Externaltypereference'{type=ERefName,module=S#state.mname},
- NewTDef=#typedef{checked=true,name=ERefName,
- typespec=Type},
- insert_once(S,parameterized_objects,{ERefName,type,NewTDef}),
- asn1_db:dbput(S#state.mname,ERefNew#'Externaltypereference'.type,
- NewTDef),
- {ERefNew,RestSettings};
-check_and_save(_S,ERef,TDef,Settings) ->
- %% This might be a renamed type in a set of specs, so rename the ERef
- {ERef#'Externaltypereference'{type=asn1ct:get_name_of_def(TDef)},Settings}.
-
save_object_set_instance(S,Name,ObjSetSpec)
when is_record(ObjSetSpec,'ObjectSet') ->
NewObjSet = #typedef{checked=true,name=Name,typespec=ObjSetSpec},
@@ -4708,18 +3933,14 @@ update_state(S,ModuleName) ->
S;
_ ->
parse_and_save(S,ModuleName),
- case asn1_db:dbget(ModuleName,'MODULE') of
- RefedMod when is_record(RefedMod,module) ->
- S#state{mname=ModuleName,module=RefedMod};
- _ -> throw({error,{asn1,{module_does_not_exist,ModuleName}}})
- end
+ Mod = #module{} = asn1_db:dbget(ModuleName,'MODULE'),
+ S#state{mname=ModuleName,module=Mod}
end.
-
get_renamed_reference(S,Name,Module) ->
case renamed_reference(S,Name,Module) of
undefined ->
- throw({error,{asn1,{undefined_type,Name}}});
+ asn1_error(S, {undefined, Name});
NewTypeName when NewTypeName =/= Name ->
get_referenced1(S,Module,NewTypeName,undefined)
end.
@@ -4770,37 +3991,49 @@ get_importmoduleoftype([I|Is],Name) ->
get_importmoduleoftype([],_) ->
undefined.
+match_parameters(S, Names) ->
+ [match_parameter(S, Name) || Name <- Names].
-match_parameters(_S,Name,[]) ->
- Name;
+match_parameter(#state{parameters=Ps}=S, Name) ->
+ match_parameter(S, Name, Ps).
-match_parameters(_S,#'Externaltypereference'{type=Name},[{#'Externaltypereference'{type=Name},NewName}|_T]) ->
+match_parameter(_S, Name, []) ->
+ Name;
+match_parameter(S, {valueset,{element_set,#type{}=Ts,none}}, Ps) ->
+ match_parameter(S, {valueset,Ts}, Ps);
+match_parameter(_S, #'Externaltypereference'{type=Name},
+ [{#'Externaltypereference'{type=Name},NewName}|_T]) ->
NewName;
-match_parameters(_S,#'Externaltypereference'{type=Name},[{{_,#'Externaltypereference'{type=Name}},NewName}|_T]) ->
+match_parameter(_S, #'Externaltypereference'{type=Name},
+ [{{_,#'Externaltypereference'{type=Name}},NewName}|_T]) ->
NewName;
-match_parameters(_S,#'Externalvaluereference'{value=Name},[{#'Externalvaluereference'{value=Name},NewName}|_T]) ->
+match_parameter(_S, #'Externalvaluereference'{value=Name},
+ [{#'Externalvaluereference'{value=Name},NewName}|_T]) ->
NewName;
-match_parameters(_S,#'Externalvaluereference'{value=Name},[{{_,#'Externalvaluereference'{value=Name}},NewName}|_T]) ->
+match_parameter(_S, #'Externalvaluereference'{value=Name},
+ [{{_,#'Externalvaluereference'{value=Name}},NewName}|_T]) ->
NewName;
-match_parameters(_S,#type{def=#'Externaltypereference'{module=M,type=Name}},
- [{#'Externaltypereference'{module=M,type=Name},Type}]) ->
+match_parameter(_S, #type{def=#'Externaltypereference'{module=M,type=Name}},
+ [{#'Externaltypereference'{module=M,type=Name},Type}]) ->
Type;
-match_parameters(_S,{valueset,#type{def=#'Externaltypereference'{type=Name}}},
- [{{_,#'Externaltypereference'{type=Name}},{valueset,#type{def=NewName}}}|_T]) ->
+match_parameter(_S, {valueset,#type{def=#'Externaltypereference'{type=Name}}},
+ [{{_,#'Externaltypereference'{type=Name}},
+ {valueset,#type{def=NewName}}}|_T]) ->
NewName;
-match_parameters(_S,{valueset,#type{def=#'Externaltypereference'{type=Name}}},
- [{{_,#'Externaltypereference'{type=Name}},
- NewName=#type{def=#'Externaltypereference'{}}}|_T]) ->
+match_parameter(_S, {valueset,#type{def=#'Externaltypereference'{type=Name}}},
+ [{{_,#'Externaltypereference'{type=Name}},
+ NewName=#type{def=#'Externaltypereference'{}}}|_T]) ->
NewName#type.def;
-match_parameters(_S,{valueset,#type{def=#'Externaltypereference'{type=Name}}},
- [{{_,#'Externaltypereference'{type=Name}},NewName}|_T]) ->
+match_parameter(_S, {valueset,#type{def=#'Externaltypereference'{type=Name}}},
+ [{{_,#'Externaltypereference'{type=Name}},NewName}|_T]) ->
NewName;
%% When a parameter is a parameterized element it has to be
%% instantiated now!
-match_parameters(S,{valueset,T=#type{def={pt,_,_Args}}},_Parameters) ->
- case catch check_type(S,#typedef{name=S#state.tname,typespec=T},T) of
- pobjectsetdef ->
-
+match_parameter(S, {valueset,T=#type{def={pt,_,_Args}}}, _Ps) ->
+ try check_type(S,#typedef{name=S#state.tname,typespec=T},T) of
+ #type{def=Ts} ->
+ Ts
+ catch pobjectsetdef ->
{_,ObjRef,_Params} = T#type.def,
{_,ObjDef}=get_referenced_type(S,ObjRef),
%%ObjDef is a pvaluesetdef where the type field holds the class
@@ -4818,17 +4051,15 @@ match_parameters(S,{valueset,T=#type{def={pt,_,_Args}}},_Parameters) ->
ObjectSet = #'ObjectSet'{class=RightClassRef,set=T},
ObjSpec = check_object(S,#typedef{typespec=ObjectSet},ObjectSet),
Name = list_to_atom(asn1ct_gen:list2name([get_datastr_name(ObjDef)|S#state.recordtopname])),
- save_object_set_instance(S,Name,ObjSpec);
- pvaluesetdef -> error({pvaluesetdef,"parameterized valueset",S});
- {error,_Reason} -> error({type,"error in parameter",S});
- Ts when is_record(Ts,type) -> Ts#type.def
+ save_object_set_instance(S,Name,ObjSpec)
end;
+
%% same as previous, only depends on order of parsing
-match_parameters(S,{valueset,{pos,{objectset,_,POSref},Args}},Parameters) ->
- match_parameters(S,{valueset,#type{def={pt,POSref,Args}}},Parameters);
-match_parameters(S,Name, [_H|T]) ->
- %%io:format("match_parameters(~p,~p)~n",[Name,[H|T]]),
- match_parameters(S,Name,T).
+match_parameter(S, {valueset,{pos,{objectset,_,POSref},Args}}, Ps) ->
+ match_parameter(S, {valueset,#type{def={pt,POSref,Args}}}, Ps);
+match_parameter(S, Name, [_H|T]) ->
+ %%io:format("match_parameter(~p,~p)~n",[Name,[H|T]]),
+ match_parameter(S, Name, T).
imported(S,Name) ->
{imports,Ilist} = (S#state.module)#module.imports,
@@ -4854,7 +4085,6 @@ check_named_number_list(_S, [{_,_}|_]=NNL) ->
NNL;
check_named_number_list(S, NNL0) ->
%% Check that the names are unique.
- T = S#state.type,
case check_unique(NNL0, 2) of
[] ->
NNL1 = [{Id,resolve_valueref(S, Val)} || {'NamedNumber',Id,Val} <- NNL0],
@@ -4863,14 +4093,14 @@ check_named_number_list(S, NNL0) ->
[] ->
NNL;
[Val|_] ->
- asn1_error(S, T, {value_reused,Val})
+ asn1_error(S, {value_reused,Val})
end;
[H|_] ->
- asn1_error(S, T, {namelist_redefinition,H})
+ asn1_error(S, {namelist_redefinition,H})
end.
-resolve_valueref(S, #'Externalvaluereference'{module=Mod,value=Name}) ->
- dbget_ex(S, Mod, Name);
+resolve_valueref(S, #'Externalvaluereference'{} = T) ->
+ get_referenced_value(S, T);
resolve_valueref(_, Val) when is_integer(Val) ->
Val.
@@ -4879,7 +4109,7 @@ check_integer(S, NNL) ->
check_bitstring(S, NNL0) ->
NNL = check_named_number_list(S, NNL0),
- _ = [asn1_error(S, S#state.type, {invalid_bit_number,Bit}) ||
+ _ = [asn1_error(S, {invalid_bit_number,Bit}) ||
{_,Bit} <- NNL, Bit < 0],
NNL.
@@ -4904,7 +4134,7 @@ check_type_identifier(S, Eref=#'Externaltypereference'{type=Class}) ->
{_,TD=#typedef{typespec=#type{def=#'Externaltypereference'{}}}} ->
check_type_identifier(S, (TD#typedef.typespec)#type.def);
_ ->
- asn1_error(S, S#state.type, {illegal_instance_of,Class})
+ asn1_error(S, {illegal_instance_of,Class})
end.
iof_associated_type(S,[]) ->
@@ -4913,12 +4143,7 @@ iof_associated_type(S,[]) ->
case get(instance_of) of
undefined ->
AssociateSeq = iof_associated_type1(S,[]),
- Tag =
- case S#state.erule of
- ber ->
- [?TAG_CONSTRUCTED(?N_INSTANCE_OF)];
- _ -> []
- end,
+ Tag = [?TAG_CONSTRUCTED(?N_INSTANCE_OF)],
TypeDef=#typedef{checked=true,
name='INSTANCE OF',
typespec=#type{tag=Tag,
@@ -4944,16 +4169,11 @@ iof_associated_type1(S,C) ->
[] -> 'ASN1_OPEN_TYPE';
_ -> {typefield,'Type'}
end,
- {ObjIdTag,C1TypeTag}=
- case S#state.erule of
- ber ->
- {[{'UNIVERSAL',8}],
- [#tag{class='UNIVERSAL',
- number=6,
- type='IMPLICIT',
- form=0}]};
- _ -> {[{'UNIVERSAL','INTEGER'}],[]}
- end,
+ ObjIdTag = [{'UNIVERSAL',8}],
+ C1TypeTag = [#tag{class='UNIVERSAL',
+ number=6,
+ type='IMPLICIT',
+ form=0}],
TypeIdentifierRef=#'Externaltypereference'{module=ModuleName,
type='TYPE-IDENTIFIER'},
ObjectIdentifier =
@@ -4992,9 +4212,13 @@ iof_associated_type1(S,C) ->
%% returns the leading attribute, the constraint of the components and
%% the tablecinf value for the second component.
-instance_of_constraints(_,[]) ->
+instance_of_constraints(_, []) ->
{false,[],[],[]};
-instance_of_constraints(S, [{simpletable,Type}]) ->
+instance_of_constraints(S, [{element_set,{simpletable,C},none}]) ->
+ {element_set,Type,none} = C,
+ instance_of_constraints_1(S, Type).
+
+instance_of_constraints_1(S, Type) ->
#type{def=#'Externaltypereference'{type=Name}} = Type,
ModuleName = S#state.mname,
ObjectSetRef=#'Externaltypereference'{module=ModuleName,
@@ -5014,93 +4238,100 @@ instance_of_constraints(S, [{simpletable,Type}]) ->
valueindex=[]},
{TableCInf,[{simpletable,Name}],CRel,[{objfun,ObjectSetRef}]}.
-%% Check ENUMERATED
-%% ****************************************
-%% Check that all values are unique
-%% assign values to un-numbered identifiers
-%% check that the constraints are allowed and correct
-%% put the updated info back into database
-check_enumerated(_S,[{Name,Number}|_Rest]= NNList,_Constr) when is_atom(Name), is_integer(Number)->
- %% already checked , just return the same list
- NNList;
-check_enumerated(_S,{[{Name,Number}|_Rest],L}= NNList,_Constr) when is_atom(Name), is_integer(Number), is_list(L)->
- %% already checked , contains extension marker, just return the same lists
- NNList;
-check_enumerated(S,NamedNumberList,_Constr) ->
- check_enum(S,NamedNumberList,[],[],[]).
-
-%% identifiers are put in Acc2
-%% returns either [{Name,Number}] or {[{Name,Number}],[{ExtName,ExtNumber}]}
-%% the latter is returned if the ENUMERATION contains EXTENSIONMARK
-check_enum(S,[{'NamedNumber',Id,Num}|T],Acc1,Acc2,Root) when is_integer(Num) ->
- check_enum(S,T,[{Id,Num}|Acc1],Acc2,Root);
-check_enum(S,['EXTENSIONMARK'|T],Acc1,Acc2,_Root) ->
- NewAcc2 = lists:keysort(2,Acc1),
- NewList = enum_number(lists:reverse(Acc2),NewAcc2,0,[],[]),
- { NewList, check_enum(S,T,[],[],enum_counts(NewList))};
-check_enum(S,[Id|T],Acc1,Acc2,Root) when is_atom(Id) ->
- check_enum(S,T,Acc1,[Id|Acc2],Root);
-check_enum(_S,[],Acc1,Acc2,Root) ->
- NewAcc2 = lists:keysort(2,Acc1),
- enum_number(lists:reverse(Acc2),NewAcc2,0,[],Root).
-
-
-% assign numbers to identifiers , numbers from 0 ... but must not
-% be the same as already assigned to NamedNumbers
-enum_number(Identifiers,NamedNumbers,Cnt,Acc,[]) ->
- enum_number(Identifiers,NamedNumbers,Cnt,Acc);
-enum_number(Identifiers,NamedNumbers,_Cnt,Acc,CountL) ->
- enum_extnumber(Identifiers,NamedNumbers,Acc,CountL).
-
-enum_number([H|T],[{Id,Num}|T2],Cnt,Acc) when Num > Cnt ->
- enum_number(T,[{Id,Num}|T2],Cnt+1,[{H,Cnt}|Acc]);
-enum_number([H|T],[{Id,Num}|T2],Cnt,Acc) when Num < Cnt -> % negative Num
- enum_number(T,T2,Cnt+1,[{H,Cnt},{Id,Num}|Acc]);
-enum_number([],L2,_Cnt,Acc) ->
- lists:append([lists:reverse(Acc),L2]);
-enum_number(L,[{Id,Num}|T2],Cnt,Acc) -> % Num == Cnt
- enum_number(L,T2,Cnt+1,[{Id,Num}|Acc]);
-enum_number([H|T],[],Cnt,Acc) ->
- enum_number(T,[],Cnt+1,[{H,Cnt}|Acc]).
-
-enum_extnumber(Identifiers,NamedNumbers,Acc,[C]) ->
- check_add_enum_numbers(NamedNumbers,[C]),
- enum_number(Identifiers,NamedNumbers,C,Acc);
-enum_extnumber([H|T],[{Id,Num}|T2],Acc,[C|Counts]) when Num > C ->
- enum_extnumber(T,[{Id,Num}|T2],[{H,C}|Acc],Counts);
-enum_extnumber([],L2,Acc,Cnt) ->
- check_add_enum_numbers(L2, Cnt),
- lists:concat([lists:reverse(Acc),L2]);
-enum_extnumber(_Identifiers,[{Id,Num}|_T2],_Acc,[C|_]) when Num < C ->
-%% enum_extnumber(Identifiers,T2,[{Id,Num}|Acc],Counts);
- exit({error,{asn1,"AdditionalEnumeration element with same number as root element",{Id,Num}}});
-enum_extnumber(Identifiers,[{Id,Num}|T2],Acc,[_C|Counts]) -> % Num =:= C
- enum_extnumber(Identifiers,T2,[{Id,Num}|Acc],Counts);
-enum_extnumber([H|T],[],Acc,[C|Counts]) ->
- enum_extnumber(T,[],[{H,C}|Acc],Counts).
-
-enum_counts([]) ->
- [0];
-enum_counts(L) ->
- Used=[I||{_,I}<-L],
- AddEnumLb = lists:max(Used) + 1,
- lists:foldl(fun(El,AccIn)->lists:delete(El,AccIn) end,
- lists:seq(0,AddEnumLb),
- Used).
-check_add_enum_numbers(L, Cnt) ->
- Max = lists:max(Cnt),
- Fun = fun({_,N}=El) when N < Max ->
- case lists:member(N,Cnt) of
- false ->
- exit({error,{asn1,"AdditionalEnumeration element with same number as root element",El}});
- _ ->
- ok
- end;
- (_) ->
- ok
- end,
- lists:foreach(Fun,L).
+%%%
+%%% Check ENUMERATED.
+%%%
+check_enumerated(_S, [{Name,Number}|_]=NNL)
+ when is_atom(Name), is_integer(Number) ->
+ %% Already checked.
+ NNL;
+check_enumerated(_S, {[{Name,Number}|_],L}=NNL)
+ when is_atom(Name), is_integer(Number), is_list(L) ->
+ %% Already checked (with extension).
+ NNL;
+check_enumerated(S, NNL) ->
+ check_enum_ids(S, NNL, gb_sets:empty()),
+ check_enum(S, NNL, gb_sets:empty(), []).
+
+check_enum_ids(S, [{'NamedNumber',Id,_}|T], Ids0) ->
+ Ids = check_enum_update_ids(S, Id, Ids0),
+ check_enum_ids(S, T, Ids);
+check_enum_ids(S, ['EXTENSIONMARK'|T], Ids) ->
+ check_enum_ids(S, T, Ids);
+check_enum_ids(S, [Id|T], Ids0) when is_atom(Id) ->
+ Ids = check_enum_update_ids(S, Id, Ids0),
+ check_enum_ids(S, T, Ids);
+check_enum_ids(_, [], _) ->
+ ok.
+
+check_enum(S, [{'NamedNumber',Id,N}|T], Used0, Acc) ->
+ Used = check_enum_update_used(S, Id, N, Used0),
+ check_enum(S, T, Used, [{Id,N}|Acc]);
+check_enum(S, ['EXTENSIONMARK'|Ext0], Used0, Acc0) ->
+ Acc = lists:reverse(Acc0),
+ {Root,Used,Cnt} = check_enum_number_root(Acc, Used0, 0, []),
+ Ext = check_enum_ext(S, Ext0, Used, Cnt, []),
+ {Root,Ext};
+check_enum(S, [Id|T], Used, Acc) when is_atom(Id) ->
+ check_enum(S, T, Used, [Id|Acc]);
+check_enum(_, [], Used, Acc0) ->
+ Acc = lists:reverse(Acc0),
+ {Root,_,_} = check_enum_number_root(Acc, Used, 0, []),
+ lists:keysort(2, Root).
+
+check_enum_number_root([Id|T]=T0, Used0, Cnt, Acc) when is_atom(Id) ->
+ case gb_sets:is_element(Cnt, Used0) of
+ false ->
+ Used = gb_sets:insert(Cnt, Used0),
+ check_enum_number_root(T, Used, Cnt+1, [{Id,Cnt}|Acc]);
+ true ->
+ check_enum_number_root(T0, Used0, Cnt+1, Acc)
+ end;
+check_enum_number_root([H|T], Used, Cnt, Acc) ->
+ check_enum_number_root(T, Used, Cnt, [H|Acc]);
+check_enum_number_root([], Used, Cnt, Acc) ->
+ {lists:keysort(2, Acc),Used,Cnt}.
+
+check_enum_ext(S, [{'NamedNumber',Id,N}|T], Used0, C, Acc) ->
+ Used = check_enum_update_used(S, Id, N, Used0),
+ if
+ N < C ->
+ asn1_error(S, {enum_not_ascending,Id,N,C-1});
+ true ->
+ ok
+ end,
+ check_enum_ext(S, T, Used, N+1, [{Id,N}|Acc]);
+check_enum_ext(S, [Id|T]=T0, Used0, C, Acc) when is_atom(Id) ->
+ case gb_sets:is_element(C, Used0) of
+ true ->
+ check_enum_ext(S, T0, Used0, C+1, Acc);
+ false ->
+ Used = gb_sets:insert(C, Used0),
+ check_enum_ext(S, T, Used, C+1, [{Id,C}|Acc])
+ end;
+check_enum_ext(_, [], _, _, Acc) ->
+ lists:keysort(2, Acc).
+
+check_enum_update_ids(S, Id, Ids) ->
+ case gb_sets:is_element(Id, Ids) of
+ false ->
+ gb_sets:insert(Id, Ids);
+ true ->
+ asn1_error(S, {enum_illegal_redefinition,Id})
+ end.
+
+check_enum_update_used(S, Id, N, Used) ->
+ case gb_sets:is_element(N, Used) of
+ false ->
+ gb_sets:insert(N, Used);
+ true ->
+ asn1_error(S, {enum_reused_value,Id,N})
+ end.
+
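+%% A quick numbering sketch (hypothetical input, mirroring the checks
+%% above): for ENUMERATED {a, b, ..., c(5), d} the root [a,b] is
+%% numbered [{a,0},{b,1}] by check_enum_number_root/4, and the
+%% extension list becomes [{c,5},{d,6}], since values after '...' must
+%% keep ascending.
+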
+%%%
+%%% End of ENUMERATED checking.
+%%%
check_boolean(_S,_Constr) ->
ok.
@@ -5145,7 +4376,7 @@ check_sequence(S,Type,Comps) ->
CompListTuple = complist_as_tuple(NewComps4),
{CRelInf,CompListTuple};
Dupl ->
- throw({error,{asn1,{duplicate_components,Dupl}}})
+ asn1_error(S, {duplicate_identifier, error_value(hd(Dupl))})
end.
complist_as_tuple(CompList) ->
@@ -5155,8 +4386,6 @@ complist_as_tuple([#'EXTENSIONMARK'{}|T], Acc, Ext, Acc2, root) ->
complist_as_tuple(T, Acc, Ext, Acc2, ext);
complist_as_tuple([#'EXTENSIONMARK'{}|T], Acc, Ext, Acc2, ext) ->
complist_as_tuple(T, Acc, Ext, Acc2, root2);
-complist_as_tuple([#'EXTENSIONMARK'{}|_T], _Acc, _Ext, _Acc2, root2) ->
- throw({error,{asn1,{too_many_extension_marks}}});
complist_as_tuple([C|T], Acc, Ext, Acc2, root) ->
complist_as_tuple(T, [C|Acc], Ext, Acc2, root);
complist_as_tuple([C|T], Acc, Ext, Acc2, ext) ->
@@ -5199,11 +4428,11 @@ expand_components2(S,{_,PT={pt,_,_}}) ->
expand_components2(S,{_,OCFT = #'ObjectClassFieldType'{}}) ->
UncheckedType = #type{def=OCFT},
Type = check_type(S,#typedef{typespec=UncheckedType},UncheckedType),
- expand_components2(S,{undefined,oCFT_def(S,Type)});
+ expand_components2(S, {undefined,ocft_def(Type)});
expand_components2(S,{_,ERef}) when is_record(ERef,'Externaltypereference') ->
expand_components2(S,get_referenced_type(S,ERef));
-expand_components2(_S,Err) ->
- throw({error,{asn1,{illegal_COMPONENTS_OF,Err}}}).
+expand_components2(S,{_, What}) ->
+ asn1_error(S, {illegal_COMPONENTS_OF, error_value(What)}).
take_only_rootset([])->
[];
@@ -5252,7 +4481,7 @@ check_sequenceof(S,Type,Component) when is_record(Component,type) ->
check_set(S,Type,Components) ->
{TableCInf,NewComponents} = check_sequence(S,Type,Components),
- check_distinct_tags(NewComponents,[]),
+ check_unique_tags(S, collect_components(NewComponents), []),
case {lists:member(der,S#state.options),S#state.erule} of
{true,_} ->
{Sorted,SortedComponents} = sort_components(der,S,NewComponents),
@@ -5264,35 +4493,21 @@ check_set(S,Type,Components) ->
{false,TableCInf,NewComponents}
end.
-
-%% check that all tags are distinct according to X.680 26.3
-check_distinct_tags({C1,C2,C3},Acc) when is_list(C1),is_list(C2),is_list(C3) ->
- check_distinct_tags(C1++C2++C3,Acc);
-check_distinct_tags({C1,C2},Acc) when is_list(C1),is_list(C2) ->
- check_distinct_tags(C1++C2,Acc);
-check_distinct_tags([#'ComponentType'{tags=[T]}|Cs],Acc) ->
- check_distinct(T,Acc),
- check_distinct_tags(Cs,[T|Acc]);
-check_distinct_tags([C=#'ComponentType'{tags=[T|Ts]}|Cs],Acc) ->
- check_distinct(T,Acc),
- check_distinct_tags([C#'ComponentType'{tags=Ts}|Cs],[T|Acc]);
-check_distinct_tags([#'ComponentType'{tags=[]}|_Cs],_Acc) ->
- throw({error,"Not distinct tags in SET"});
-check_distinct_tags([],_) ->
- ok.
-check_distinct(T,Acc) ->
- case lists:member(T,Acc) of
- true ->
- throw({error,"Not distinct tags in SET"});
- _ -> ok
- end.
+collect_components({C1,C2,C3}) ->
+ collect_components(C1++C2++C3);
+collect_components({C1,C2}) ->
+ collect_components(C1++C2);
+collect_components(Cs) ->
+ %% Assert that tags are not empty
+ [] = [EmptyTag || EmptyTag = #'ComponentType'{tags=[]} <- Cs],
+ Cs.
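+
+%% Informal note: the empty-list match above is an assertion; a
+%% component with an empty tags list now fails this match (badmatch)
+%% rather than raising the old "Not distinct tags in SET" error.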
%% sorting in canonical order according to X.680 8.6, X.691 9.2
%% DER: all components shall be sorted in canonical order.
%% PER: only root components shall be sorted in canonical order. The
%% extension components shall remain in textual order.
%%
-sort_components(der,S=#state{tname=TypeName},Components) ->
+sort_components(der, S, Components) ->
{R1,Ext,R2} = extension(textual_order(Components)),
CompsList = case Ext of
noext -> R1;
@@ -5300,88 +4515,34 @@ sort_components(der,S=#state{tname=TypeName},Components) ->
end,
case {untagged_choice(S,CompsList),Ext} of
{false,noext} ->
- {true,sort_components1(S,TypeName,CompsList,[],[],[],[])};
+ {true,sort_components1(CompsList)};
{false,_} ->
- {true,{sort_components1(S,TypeName,CompsList,[],[],[],[]), []}};
+ {true,{sort_components1(CompsList),[]}};
{true,noext} ->
%% sort in run-time
{dynamic,R1};
_ ->
{dynamic,{R1, Ext, R2}}
end;
-sort_components(per,S=#state{tname=TypeName},Components) ->
+sort_components(per, S, Components) ->
{R1,Ext,R2} = extension(textual_order(Components)),
Root = tag_untagged_choice(S,R1++R2),
case Ext of
noext ->
- {true,sort_components1(S,TypeName,Root,[],[],[],[])};
+ {true,sort_components1(Root)};
_ ->
- {true,{sort_components1(S,TypeName,Root,[],[],[],[]),
- Ext}}
+ {true,{sort_components1(Root),Ext}}
end.
-sort_components1(S,TypeName,[C=#'ComponentType'{tags=[{'UNIVERSAL',_}|_R]}|Cs],
- UnivAcc,ApplAcc,ContAcc,PrivAcc) ->
- sort_components1(S,TypeName,Cs,[C|UnivAcc],ApplAcc,ContAcc,PrivAcc);
-sort_components1(S,TypeName,[C=#'ComponentType'{tags=[{'APPLICATION',_}|_R]}|Cs],
- UnivAcc,ApplAcc,ContAcc,PrivAcc) ->
- sort_components1(S,TypeName,Cs,UnivAcc,[C|ApplAcc],ContAcc,PrivAcc);
-sort_components1(S,TypeName,[C=#'ComponentType'{tags=[{'CONTEXT',_}|_R]}|Cs],
- UnivAcc,ApplAcc,ContAcc,PrivAcc) ->
- sort_components1(S,TypeName,Cs,UnivAcc,ApplAcc,[C|ContAcc],PrivAcc);
-sort_components1(S,TypeName,[C=#'ComponentType'{tags=[{'PRIVATE',_}|_R]}|Cs],
- UnivAcc,ApplAcc,ContAcc,PrivAcc) ->
- sort_components1(S,TypeName,Cs,UnivAcc,ApplAcc,ContAcc,[C|PrivAcc]);
-sort_components1(S,TypeName,[],UnivAcc,ApplAcc,ContAcc,PrivAcc) ->
- I = #'ComponentType'.tags,
- ascending_order_check(S,TypeName,sort_universal_type(UnivAcc)) ++
- ascending_order_check(S,TypeName,lists:keysort(I,ApplAcc)) ++
- ascending_order_check(S,TypeName,lists:keysort(I,ContAcc)) ++
- ascending_order_check(S,TypeName,lists:keysort(I,PrivAcc)).
-
-ascending_order_check(S,TypeName,Components) ->
- ascending_order_check1(S,TypeName,Components),
- Components.
-
-ascending_order_check1(S,TypeName,
- [C1 = #'ComponentType'{tags=[{_,T}|_]},
- C2 = #'ComponentType'{tags=[{_,T}|_]}|Rest]) ->
- asn1ct:warning("Indistinct tag ~p in SET ~p, components ~p and ~p~n",
- [T,TypeName,C1#'ComponentType'.name,C2#'ComponentType'.name],S,
- "Indistinct tag in SET"),
- ascending_order_check1(S,TypeName,[C2|Rest]);
-ascending_order_check1(S,TypeName,
- [C1 = #'ComponentType'{tags=[{'UNIVERSAL',T1}|_]},
- C2 = #'ComponentType'{tags=[{'UNIVERSAL',T2}|_]}|Rest]) ->
- case (decode_type(T1) == decode_type(T2)) of
- true ->
- asn1ct:warning("Indistinct tags ~p and ~p in"
- " SET ~p, components ~p and ~p~n",
- [T1,T2,TypeName,C1#'ComponentType'.name,
- C2#'ComponentType'.name],S,
- "Indistinct tags and in SET"),
- ascending_order_check1(S,TypeName,[C2|Rest]);
- _ ->
- ascending_order_check1(S,TypeName,[C2|Rest])
- end;
-ascending_order_check1(S,N,[_|Rest]) ->
- ascending_order_check1(S,N,Rest);
-ascending_order_check1(_,_,[]) ->
- ok.
-
-sort_universal_type(Components) ->
- List = lists:map(fun(C) ->
- #'ComponentType'{tags=[{_,T}|_]} = C,
- {decode_type(T),C}
- end,
- Components),
- SortedList = lists:keysort(1,List),
- lists:map(fun(X)->element(2,X) end,SortedList).
-
-decode_type(I) when is_integer(I) ->
- I;
-decode_type(T) ->
- asn1ct_gen_ber_bin_v2:decode_type(T).
+sort_components1(Cs0) ->
+ Cs1 = [{tag_key(Tag),C} || #'ComponentType'{tags=[Tag|_]}=C <- Cs0],
+ Cs = lists:sort(Cs1),
+ [C || {_,C} <- Cs].
+
+tag_key({'UNIVERSAL',Tag}) -> {0,Tag};
+tag_key({'APPLICATION',Tag}) -> {1,Tag};
+tag_key({'CONTEXT',Tag}) -> {2,Tag};
+tag_key({'PRIVATE',Tag}) -> {3,Tag}.
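+
+%% Hypothetical illustration of the canonical order: tag_key/1 maps
+%% {'UNIVERSAL',N} to {0,N}, {'APPLICATION',N} to {1,N}, and so on, so
+%% sort_components1/1 orders UNIVERSAL tags first, then APPLICATION,
+%% CONTEXT and PRIVATE, each class in ascending tag-number order.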
untagged_choice(_S,[#'ComponentType'{typespec=#type{tag=[],def={'CHOICE',_}}}|_Rest]) ->
true;
@@ -5477,35 +4638,43 @@ check_selectiontype(S,Name,#type{def=Eref})
{RefMod,TypeDef} = get_referenced_type(S,Eref),
NewS = S#state{module=load_asn1_module(S,RefMod),
mname=RefMod,
- type=TypeDef,
tname=get_datastr_name(TypeDef)},
check_selectiontype2(NewS,Name,TypeDef);
check_selectiontype(S,Name,Type=#type{def={pt,_,_}}) ->
- TName =
- case S#state.recordtopname of
- [] ->
- S#state.tname;
- N -> N
- end,
+ TName = case S#state.recordtopname of
+ [] -> S#state.tname;
+ N -> N
+ end,
TDef = #typedef{name=TName,typespec=Type},
check_selectiontype2(S,Name,TDef);
-check_selectiontype(S,Name,Type) ->
- Msg = lists:flatten(io_lib:format("SelectionType error: ~w < ~w must be a reference to a CHOICE.",[Name,Type])),
- error({type,Msg,S}).
+check_selectiontype(S, _Name, Type) ->
+ asn1_error(S, {illegal_choice_type, error_value(Type)}).
check_selectiontype2(S,Name,TypeDef) ->
NewS = S#state{recordtopname=get_datastr_name(TypeDef)},
- CheckedType = check_type(NewS,TypeDef,TypeDef#typedef.typespec),
- Components = get_choice_components(S,CheckedType#type.def),
- case lists:keysearch(Name,#'ComponentType'.name,Components) of
- {value,C} ->
- %% The selected type will have the tag of the selected type.
- _T = C#'ComponentType'.typespec;
-% T#type{tag=def_to_tag(NewS,T#type.def)};
- _ ->
- Msg = lists:flatten(io_lib:format("error checking SelectionType: ~w~n",[Name])),
- error({type,Msg,S})
+ Components =
+ try
+ CheckedType = check_type(NewS,TypeDef,TypeDef#typedef.typespec),
+ get_choice_components(S,CheckedType#type.def)
+ catch error:_ ->
+ asn1_error(S, {illegal_choice_type, error_value(TypeDef)})
+ end,
+ case lists:keyfind(Name, #'ComponentType'.name, Components) of
+ #'ComponentType'{typespec=TS} -> TS;
+ false -> asn1_error(S, {illegal_id, error_value(Name)})
end.
+
+
+get_choice_components(_S,{'CHOICE',Components}) when is_list(Components)->
+ Components;
+get_choice_components(_S,{'CHOICE',{C1,C2}}) when is_list(C1),is_list(C2) ->
+ C1++C2;
+get_choice_components(S,ERef=#'Externaltypereference'{}) ->
+ {_RefMod,TypeDef}=get_referenced_type(S,ERef),
+ #typedef{typespec=TS} = TypeDef,
+ get_choice_components(S,TS#type.def).
+
+
check_restrictedstring(_S,_Def,_Constr) ->
ok.
@@ -5538,7 +4707,7 @@ check_choice(S,Type,Components) when is_list(Components) ->
check_unique_tags(S, NewComps3),
complist_as_tuple(NewComps3);
Dupl ->
- throw({error,{asn1,{duplicate_choice_alternatives,Dupl}}})
+ asn1_error(S, {duplicate_identifier,error_value(hd(Dupl))})
end;
check_choice(_S,_,[]) ->
[].
@@ -5635,25 +4804,30 @@ check_unique_tags(S,C) ->
case (S#state.module)#module.tagdefault of
'AUTOMATIC' ->
case any_manual_tag(C) of
- false -> true;
- _ -> collect_and_sort_tags(C,[])
+ false ->
+ true;
+ true ->
+ check_unique_tags(S, C, [])
end;
_ ->
- collect_and_sort_tags(C,[])
+ check_unique_tags(S, C, [])
end.
-collect_and_sort_tags([C|Rest],Acc) when is_record(C,'ComponentType') ->
- collect_and_sort_tags(Rest,C#'ComponentType'.tags ++ Acc);
-collect_and_sort_tags([_|Rest],Acc) ->
- collect_and_sort_tags(Rest,Acc);
-collect_and_sort_tags([],Acc) ->
- {Dupl,_}= lists:mapfoldl(fun(El,El)->{{dup,El},El};(El,_Prev)-> {El,El} end,notag,lists:sort(Acc)),
- Dupl2 = [Dup|| {dup,Dup} <- Dupl],
- if
- length(Dupl2) > 0 ->
- throw({error,{asn1,{duplicates_of_the_tags,Dupl2}}});
- true ->
- true
+check_unique_tags(S, [#'ComponentType'{name=Name,tags=Tags0}|T], Acc) ->
+ Tags = [{Tag,Name} || Tag <- Tags0],
+ check_unique_tags(S, T, Tags ++ Acc);
+check_unique_tags(S, [_|T], Acc) ->
+ check_unique_tags(S, T, Acc);
+check_unique_tags(S, [], Acc) ->
+ R0 = sofs:relation(Acc),
+ R1 = sofs:relation_to_family(R0),
+ R2 = sofs:to_external(R1),
+ Dup = [Els || {_,[_,_|_]=Els} <- R2],
+ case Dup of
+ [] ->
+ ok;
+ [FirstDupl|_] ->
+ asn1_error(S, {duplicate_tags,FirstDupl})
end.
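+
+%% Rough example (made-up components): if 'a' and 'b' both carry the
+%% tag {'CONTEXT',0}, the relation-to-family step yields
+%% [{{'CONTEXT',0},[a,b]}] and the reported error is
+%% {duplicate_tags,[a,b]}.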
check_unique(L,Pos) ->
@@ -5795,28 +4969,18 @@ componentrelation_leadingattr(S,[C= #'ComponentType'{}|Cs],CompList,Acc,CompAcc)
{[],C};
[{ObjSet,Attr,N,ClassDef,_Path,ValueIndex}|_NewRest] ->
OS = object_set_mod_name(S,ObjSet),
- UniqueFieldName =
- case (catch get_unique_fieldname(S,#classdef{typespec=ClassDef})) of
- {error,'__undefined_',_} ->
- no_unique;
- {asn1,Msg,_} ->
- error({type,Msg,S});
- {'EXIT',Msg} ->
- error({type,{internal_error,Msg},S});
- {Other,_} -> Other
- end,
-% UsedFieldName = get_used_fieldname(S,Attr,STList),
+ UniqFN = get_unique_fieldname(S,
+ #classdef{typespec=ClassDef}),
%% Res should be done differently: even though
%% a unique field name exists it is not
%% certain that the ObjectClassFieldType of
%% the simple table constraint picks that
%% class field.
Res = #simpletableattributes{objectsetname=OS,
-%% c_name=asn1ct_gen:un_hyphen_var(Attr),
c_name=Attr,
c_index=N,
- usedclassfield=UniqueFieldName,
- uniqueclassfield=UniqueFieldName,
+ usedclassfield=UniqFN,
+ uniqueclassfield=UniqFN,
valueindex=ValueIndex},
{[Res],C#'ComponentType'{typespec=NewTSpec}}
end;
@@ -5869,7 +5033,7 @@ remove_doubles1(El,L) ->
NewL -> remove_doubles1(El,NewL)
end.
-%% get_simple_table_info searches the commponents Cs by the path from
+%% get_simple_table_info searches the components Cs by the path from
%% an at-list (third argument), and follows into a component of it if
%% necessary, to get information needed for code generating.
%%
@@ -5884,32 +5048,35 @@ remove_doubles1(El,L) ->
% %% at least one step below the outermost level, i.e. the leading
% %% information shall be on a sub level. 2) They don't have any common
% %% path.
-get_simple_table_info(S,Cs,[AtList|Rest]) ->
- [get_simple_table_info1(S,Cs,AtList,[])|get_simple_table_info(S,Cs,Rest)];
-get_simple_table_info(_,_,[]) ->
- [].
-get_simple_table_info1(S,Cs,[Cname|Cnames],Path) when is_list(Cs) ->
- case lists:keysearch(Cname,#'ComponentType'.name,Cs) of
- {value,C} ->
- get_simple_table_info1(S,C,Cnames,[Cname|Path]);
- _ ->
- error({type,"Missing expected simple table constraint",S})
- end;
-get_simple_table_info1(S,#'ComponentType'{typespec=TS},[],Path) ->
- %% In this component there must be a simple table constraint
- %% o.w. the asn1 code is wrong.
- #type{def=OCFT,constraint=Cnstr} = TS,
- case constraint_member(simpletable,Cnstr) of
- {true,{simpletable,_OSRef}} ->
- simple_table_info(S,OCFT,Path);
- _ ->
- error({type,{"missing expected simple table constraint",
- Cnstr},S})
+get_simple_table_info(S, Cs, AtLists) ->
+ [get_simple_table_info1(S, Cs, AtList, []) || AtList <- AtLists].
+
+get_simple_table_info1(S, Cs, [Cname|Cnames], Path) ->
+ #'ComponentType'{} = C =
+ lists:keyfind(Cname, #'ComponentType'.name, Cs),
+ get_simple_table_info2(S, C, Cnames, [Cname|Path]).
+
+get_simple_table_info2(S, #'ComponentType'{name=Name,typespec=TS}, [], Path) ->
+ OCFT = simple_table_get_ocft(S, Name, TS),
+ case lists:keymember(simpletable, 1, TS#type.constraint) of
+ true ->
+ simple_table_info(S, OCFT, Path);
+ false ->
+ asn1_error(S, {missing_table_constraint,Name})
end;
-get_simple_table_info1(S,#'ComponentType'{typespec=TS},Cnames,Path) ->
+get_simple_table_info2(S, #'ComponentType'{typespec=TS}, Cnames, Path) ->
Components = get_atlist_components(TS#type.def),
- get_simple_table_info1(S,Components,Cnames,Path).
-
+ get_simple_table_info1(S, Components, Cnames, Path).
+
+simple_table_get_ocft(_, _, #type{def=#'ObjectClassFieldType'{}=OCFT}) ->
+ OCFT;
+simple_table_get_ocft(S, Component, #type{constraint=Constr}) ->
+ case lists:keyfind(ocft, 1, Constr) of
+ {ocft,OCFT} ->
+ OCFT;
+ false ->
+ asn1_error(S, {missing_ocft,Component})
+ end.
simple_table_info(S,#'ObjectClassFieldType'{classname=ClRef,
class=ObjectClass,
@@ -5932,19 +5099,8 @@ simple_table_info(S,#'ObjectClassFieldType'{classname=ClRef,
CDef;
_ -> #classdef{typespec=ObjectClass}
end,
- UniqueName =
- case (catch get_unique_fieldname(S,ClassDef)) of
- {error,'__undefined_',_} -> no_unique;
- {asn1,Msg,_} ->
- error({type,Msg,S});
- {'EXIT',Msg} ->
- error({type,{internal_error,Msg},S});
- {Other,_} -> Other
- end,
- {lists:reverse(Path),ObjectClassFieldName,UniqueName};
-simple_table_info(S,Type,_) ->
- error({type,{"the type referenced by a componentrelation constraint must be a ObjectClassFieldType",Type},S}).
-
+ UniqueName = get_unique_fieldname(S, ClassDef),
+ {lists:reverse(Path),ObjectClassFieldName,UniqueName}.
%% any_component_relation searches for all component relation
%% constraints that refer to the actual level and returns a list of
@@ -5958,9 +5114,8 @@ simple_table_info(S,Type,_) ->
%% is found to check the validity of the at-list.
any_component_relation(S,[#'ComponentType'{name=CName,typespec=Type}|Cs],CNames,NamePath,Acc) ->
CRelPath =
- case constraint_member(componentrelation,Type#type.constraint) of
-%% [{componentrelation,_,AtNotation}] ->
- {true,{_,_,AtNotation}} ->
+ case lists:keyfind(componentrelation, 1, Type#type.constraint) of
+ {_,_,AtNotation} ->
%% Found component relation constraint, now check
%% whether this constraint is relevant for the level
%% where the search started
@@ -5969,7 +5124,7 @@ any_component_relation(S,[#'ComponentType'{name=CName,typespec=Type}|Cs],CNames,
%% simple table constraint from where the component
%% relation is found.
evaluate_atpath(S,NamePath,CNames,AtNot);
- _ ->
+ false ->
[]
end,
InnerAcc =
@@ -5991,11 +5146,11 @@ any_component_relation(S,[#'ComponentType'{name=CName,typespec=Type}|Cs],CNames,
any_component_relation(S,Cs,CNames,NamePath,InnerAcc++CRelPath++Acc);
any_component_relation(S,Type,CNames,NamePath,Acc) when is_record(Type,type) ->
CRelPath =
- case constraint_member(componentrelation,Type#type.constraint) of
- {true,{_,_,AtNotation}} ->
+ case lists:keyfind(componentrelation, 1, Type#type.constraint) of
+ {_,_,AtNotation} ->
AtNot = extract_at_notation(AtNotation),
evaluate_atpath(S,NamePath,CNames,AtNot);
- _ ->
+ false ->
[]
end,
InnerAcc =
@@ -6017,15 +5172,6 @@ any_component_relation(S,['ExtensionAdditionGroupEnd'|Cs],CNames,NamePath,Acc) -
any_component_relation(_,[],_,_,Acc) ->
Acc.
-constraint_member(componentrelation,[CRel={componentrelation,_,_}|_Rest]) ->
- {true,CRel};
-constraint_member(simpletable,[ST={simpletable,_}|_Rest]) ->
- {true,ST};
-constraint_member(Key,[_H|T]) ->
- constraint_member(Key,T);
-constraint_member(_,[]) ->
- false.
-
%% evaluate_atpath/4 finds out whether the at notation refers to the
%% search level. The list of referenced names in the AtNot list shall
%% begin with a name that exists on the level it refers to. If the
@@ -6059,9 +5205,7 @@ evaluate_atpath(S=#state{abscomppath=TopPath},NamePath,Cnames,{outermost,AtPath=
{_,[H|_T]} ->
case lists:member(H,Cnames) of
true -> [AtPathBelowTop];
- _ ->
- %% error({type,{asn1,"failed to analyze at-path",AtPath},S})
- throw({type,{asn1,"failed to analyze at-path",AtPath},S})
+ _ -> asn1_error(S, {invalid_at_path, AtPath})
end
end;
evaluate_atpath(_,_,_,_) ->
@@ -6098,23 +5242,8 @@ tuple2complist({R1,E,R2}) ->
tuple2complist(List) when is_list(List) ->
List.
-get_choice_components(_S,{'CHOICE',Components}) when is_list(Components)->
- Components;
-get_choice_components(_S,{'CHOICE',{C1,C2}}) when is_list(C1),is_list(C2) ->
- C1++C2;
-get_choice_components(S,ERef=#'Externaltypereference'{}) ->
- {_RefMod,TypeDef}=get_referenced_type(S,ERef),
- #typedef{typespec=TS} = TypeDef,
- get_choice_components(S,TS#type.def).
-
-extract_at_notation([{Level,[#'Externalvaluereference'{value=Name}|Rest]}]) ->
- {Level,[Name|extract_at_notation1(Rest)]};
-extract_at_notation(At) ->
- exit({error,{asn1,{at_notation,At}}}).
-extract_at_notation1([#'Externalvaluereference'{value=Name}|Rest]) ->
- [Name|extract_at_notation1(Rest)];
-extract_at_notation1([]) ->
- [].
+extract_at_notation([{Level,ValueRefs}]) ->
+ {Level,[Name || #'Externalvaluereference'{value=Name} <- ValueRefs]}.
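+
+%% Sketch with made-up value references: an at-notation of
+%% [{outermost,[#'Externalvaluereference'{value=id},
+%%              #'Externalvaluereference'{value=kind}]}]
+%% is flattened to {outermost,[id,kind]}.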
%% componentrelation1/1 identifies all componentrelation constraints
%% that exist in C or in the substructure of C. Info about the found
@@ -6133,8 +5262,8 @@ componentrelation1(S,C = #type{def=Def,constraint=Constraint,tablecinf=TCI},
Ret =
% case Constraint of
% [{componentrelation,{_,_,ObjectSet},AtList}|_Rest] ->
- case constraint_member(componentrelation,Constraint) of
- {true,{_,{_,_,ObjectSet},AtList}} ->
+ case lists:keyfind(componentrelation, 1, Constraint) of
+ {_,{_,_,ObjectSet},AtList} ->
[{_,AL=[#'Externalvaluereference'{}|_R1]}|_R2] = AtList,
%% Note: if Path is longer than one, i.e. it is within
%% an inner type of the actual level, then the only
@@ -6145,7 +5274,7 @@ componentrelation1(S,C = #type{def=Def,constraint=Constraint,tablecinf=TCI},
lists:map(fun(#'Externalvaluereference'{value=V})->V end,
AL),
{[{ObjectSet,AtPath,ClassDef,Path}],Def};
- _ ->
+ false ->
%% check the inner type of component
innertype_comprel(S,Def,Path)
end,
@@ -6219,10 +5348,8 @@ componentlist_comprel(_,[],Acc,_,NewCL) ->
innertype_comprel1(S,T = #type{def=Def,constraint=Cons,tablecinf=TCI},Path) ->
Ret =
-% case Cons of
-% [{componentrelation,{_,_,ObjectSet},AtList}|_Rest] ->
- case constraint_member(componentrelation,Cons) of
- {true,{_,{_,_,ObjectSet},AtList}} ->
+ case lists:keyfind(componentrelation, 1, Cons) of
+ {_,{_,_,ObjectSet},AtList} ->
%% This AtList must have an "outermost" at sign to be
%% relevant here.
[{_,AL=[#'Externalvaluereference'{value=_Attr}|_R1]}|_R2]
@@ -6233,7 +5360,7 @@ innertype_comprel1(S,T = #type{def=Def,constraint=Cons,tablecinf=TCI},Path) ->
lists:map(fun(#'Externalvaluereference'{value=V})->V end,
AL),
[{ObjectSet,AtPath,ClassDef,Path}];
- _ ->
+ false ->
innertype_comprel(S,Def,Path)
end,
case Ret of
@@ -6301,8 +5428,7 @@ value_match(S,#'ComponentType'{typespec=Type},Name,[At|Ats],Acc) ->
InnerType = asn1ct_gen:get_inner(Type#type.def),
Components =
case get_atlist_components(Type#type.def) of
- [] -> error({type,{asn1,"element in at list must be a "
- "SEQUENCE, SET or CHOICE.",Name},S});
+ [] -> asn1_error(S, {invalid_element, Name});
Comps -> Comps
end,
{Index,ValueIndex} = component_value_index(S,InnerType,At,Components),
@@ -6322,29 +5448,27 @@ component_index1(_S,Name,[#'ComponentType'{name=Name}|_Cs],N) ->
component_index1(S,Name,[_C|Cs],N) ->
component_index1(S,Name,Cs,N+1);
component_index1(S,Name,[],_) ->
- error({type,{asn1,"component of at-list was not"
- " found in substructure",Name},S}).
+ asn1_error(S, {invalid_at_list, Name}).
-get_unique_fieldname(_S,ClassDef) when is_record(ClassDef,classdef) ->
-%% {_,Fields,_} = ClassDef#classdef.typespec,
- Fields = (ClassDef#classdef.typespec)#objectclass.fields,
- get_unique_fieldname1(Fields,[]);
+get_unique_fieldname(S, #classdef{typespec=TS}) ->
+ Fields = TS#objectclass.fields,
+ get_unique_fieldname1(S, Fields, []);
get_unique_fieldname(S,#typedef{typespec=#type{def=ClassRef}}) ->
%% A class definition may be referenced as
%% REFED-CLASS ::= DEFINED-CLASS and then REFED-CLASS is a typedef
{_M,ClassDef} = get_referenced_type(S,ClassRef),
get_unique_fieldname(S,ClassDef).
-get_unique_fieldname1([],[]) ->
- throw({error,'__undefined_',[]});
-get_unique_fieldname1([],[Name]) ->
- Name;
-get_unique_fieldname1([],Acc) ->
- throw({asn1,'only one UNIQUE field is allowed in CLASS',Acc});
-get_unique_fieldname1([{fixedtypevaluefield,Name,_,'UNIQUE',Opt}|Rest],Acc) ->
- get_unique_fieldname1(Rest,[{Name,Opt}|Acc]);
-get_unique_fieldname1([_H|T],Acc) ->
- get_unique_fieldname1(T,Acc).
+get_unique_fieldname1(S, [{fixedtypevaluefield,Name,_,'UNIQUE',Opt}|T], Acc) ->
+ get_unique_fieldname1(S, T, [{Name,Opt}|Acc]);
+get_unique_fieldname1(S, [_|T], Acc) ->
+ get_unique_fieldname1(S, T, Acc);
+get_unique_fieldname1(S, [], Acc) ->
+ case Acc of
+ [] -> no_unique;
+ [Name] -> Name;
+ [_|_] -> asn1_error(S, multiple_uniqs)
+ end.
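+
+%% Sketch, assuming a class whose only UNIQUE field is
+%% {fixedtypevaluefield,id,_,'UNIQUE','MANDATORY'}: the result is
+%% {id,'MANDATORY'}; with no UNIQUE field the atom no_unique is
+%% returned, and more than one UNIQUE field is rejected as
+%% multiple_uniqs.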
get_tableconstraint_info(S,Type,{CheckedTs,EComps,CheckedTs2}) ->
{get_tableconstraint_info(S,Type,CheckedTs,[]),
@@ -6400,31 +5524,8 @@ get_tableconstraint_info(S,Type,[C|Cs],Acc) ->
get_referenced_fieldname([{_,FirstFieldname}]) ->
{FirstFieldname,[]};
-get_referenced_fieldname([{_,FirstFieldname}|Rest]) ->
- {FirstFieldname,lists:map(fun(X)->element(2,X) end,Rest)};
-get_referenced_fieldname(Def={FieldName,RestFieldName}) when is_atom(FieldName),is_list(RestFieldName)->
- Def;
-get_referenced_fieldname(Def) ->
- {no_type,Def}.
-
-%% get_ObjectClassFieldType extracts the type from the chain of
-%% objects that leads to a final type.
-get_ObjectClassFieldType(S,ERef,PrimFieldNameList) when
- is_record(ERef,'Externaltypereference') ->
- {MName,Type} = get_referenced_type(S,ERef),
- NewS = update_state(S#state{type=Type,
- tname=ERef#'Externaltypereference'.type},MName),
- ClassSpec = check_class(NewS,Type),
- Fields = ClassSpec#objectclass.fields,
- get_ObjectClassFieldType(S,Fields,PrimFieldNameList);
-get_ObjectClassFieldType(S,Fields,L=[_PrimFieldName1|_Rest]) ->
- check_PrimitiveFieldNames(S,Fields,L),
- get_OCFType(S,Fields,L);
-get_ObjectClassFieldType(S,ERef,{FieldName,Rest}) ->
- get_ObjectClassFieldType(S,ERef,Rest ++ [FieldName]).
-
-check_PrimitiveFieldNames(_S,_Fields,_) ->
- ok.
+get_referenced_fieldname([{_,FirstFieldname}|T]) ->
+ {FirstFieldname,[element(2, X) || X <- T]}.
%% get_ObjectClassFieldType_classdef gets the def of the class of the
%% ObjectClassFieldType, i.e. the objectclass record. If the type has
@@ -6445,15 +5546,13 @@ get_OCFType(S,Fields,[PrimFieldName|Rest]) ->
{fixedtypevaluefield,PrimFieldName,Type};
{value,{objectfield,_,ClassRef,_Unique,_OptSpec}} ->
{MName,ClassDef} = get_referenced_type(S,ClassRef),
- NewS = update_state(S#state{type=ClassDef,
- tname=get_datastr_name(ClassDef)},
+ NewS = update_state(S#state{tname=get_datastr_name(ClassDef)},
MName),
CheckedCDef = check_class(NewS,ClassDef),
get_OCFType(S,CheckedCDef#objectclass.fields,Rest);
{value,{objectsetfield,_,Type,_OptSpec}} ->
{MName,ClassDef} = get_referenced_type(S,Type#type.def),
- NewS = update_state(S#state{type=ClassDef,
- tname=get_datastr_name(ClassDef)},
+ NewS = update_state(S#state{tname=get_datastr_name(ClassDef)},
MName),
CheckedCDef = check_class(NewS,ClassDef),
get_OCFType(S,CheckedCDef#objectclass.fields,Rest);
@@ -6461,7 +5560,7 @@ get_OCFType(S,Fields,[PrimFieldName|Rest]) ->
{value,Other} ->
{element(1,Other),PrimFieldName};
_ ->
- throw({error,lists:flatten(io_lib:format("undefined FieldName in ObjectClassFieldType: ~w",[PrimFieldName]))})
+ asn1_error(S, {illegal_object_field, PrimFieldName})
end.
get_taglist(S,Ext) when is_record(Ext,'Externaltypereference') ->
@@ -6485,30 +5584,8 @@ get_taglist(_S,#'ObjectClassFieldType'{type={typefield,_}}) ->
[];
get_taglist(S,#'ObjectClassFieldType'{type={fixedtypevaluefield,_,Type}}) ->
get_taglist(S,Type);
-get_taglist(S,{ERef=#'Externaltypereference'{},FieldNameList})
- when is_list(FieldNameList) ->
- case get_ObjectClassFieldType(S,ERef,FieldNameList) of
- {fixedtypevaluefield,_,Type} -> get_taglist(S,Type);
- {TypeFieldName,_} when is_atom(TypeFieldName) -> []%should check if allowed
- end;
-get_taglist(S,{ObjCl,FieldNameList}) when is_record(ObjCl,objectclass),
- is_list(FieldNameList) ->
- case get_ObjectClassFieldType(S,ObjCl#objectclass.fields,FieldNameList) of
- {fixedtypevaluefield,_,Type} -> get_taglist(S,Type);
- {TypeFieldName,_} when is_atom(TypeFieldName) -> []%should check if allowed
- end;
-get_taglist(S,Def) ->
- case S#state.erule of
- ber ->
- [];
- _ ->
- case Def of
- 'ASN1_OPEN_TYPE' -> % open_type has no UNIVERSAL tag as such
- [];
- _ ->
- [asn1ct_gen:def_to_tag(Def)]
- end
- end.
+get_taglist(_, _) ->
+ [].
get_taglist1(S,[#'ComponentType'{name=_Cname,tags=TagL}|Rest]) when is_list(TagL) ->
%% tag_list has been here , just return TagL and continue with next alternative
@@ -6565,15 +5642,6 @@ get_taglist1(_S,[]) ->
%% tag_number('CHARACTER STRING') -> 29;
%% tag_number('BMPString') -> 30.
-
-dbget_ex(_S,Module,Key) ->
- case asn1_db:dbget(Module,Key) of
- undefined ->
-
- throw({error,{asn1,{undefined,{Module,Key}}}}); % this is catched on toplevel type or value
- T -> T
- end.
-
merge_tags(T1, T2) when is_list(T2) ->
merge_tags2(T1 ++ T2, []);
merge_tags(T1, T2) ->
@@ -6590,75 +5658,46 @@ merge_tags2([H|T],Acc) ->
merge_tags2([], Acc) ->
lists:reverse(Acc).
-%% merge_constraints(C1, []) ->
-%% C1;
-%% merge_constraints([], C2) ->
-%% C2;
-%% merge_constraints(C1, C2) ->
-%% {SList,VList,PAList,Rest} = splitlist(C1++C2,[],[],[],[]),
-%% SizeC = merge_constraints(SList),
-%% ValueC = merge_constraints(VList),
-%% PermAlphaC = merge_constraints(PAList),
-%% case Rest of
-%% [] ->
-%% SizeC ++ ValueC ++ PermAlphaC;
-%% _ ->
-%% throw({error,{asn1,{not_implemented,{merge_constraints,Rest}}}})
-%% end.
-
-%% merge_constraints([]) -> [];
-%% merge_constraints([C1 = {_,{Low1,High1}},{_,{Low2,High2}}|Rest]) when Low1 >= Low2,
-%% High1 =< High2 ->
-%% merge_constraints([C1|Rest]);
-%% merge_constraints([C1={'PermittedAlphabet',_},C2|Rest]) ->
-%% [C1|merge_constraints([C2|Rest])];
-%% merge_constraints([C1 = {_,{_Low1,_High1}},C2 = {_,{_Low2,_High2}}|_Rest]) ->
-%% throw({error,asn1,{conflicting_constraints,{C1,C2}}});
-%% merge_constraints([C]) ->
-%% [C].
-
-%% splitlist([C={'SizeConstraint',_}|Rest],Sacc,Vacc,PAacc,Restacc) ->
-%% splitlist(Rest,[C|Sacc],Vacc,PAacc,Restacc);
-%% splitlist([C={'ValueRange',_}|Rest],Sacc,Vacc,PAacc,Restacc) ->
-%% splitlist(Rest,Sacc,[C|Vacc],PAacc,Restacc);
-%% splitlist([C={'PermittedAlphabet',_}|Rest],Sacc,Vacc,PAacc,Restacc) ->
-%% splitlist(Rest,Sacc,Vacc,[C|PAacc],Restacc);
-%% splitlist([C|Rest],Sacc,Vacc,PAacc,Restacc) ->
-%% splitlist(Rest,Sacc,Vacc,PAacc,[C|Restacc]);
-%% splitlist([],Sacc,Vacc,PAacc,Restacc) ->
-%% {lists:reverse(Sacc),
-%% lists:reverse(Vacc),
-%% lists:reverse(PAacc),
-%% lists:reverse(Restacc)}.
-
-
-
-storeindb(S,M) when is_record(M,module) ->
- TVlist = M#module.typeorval,
- NewM = M#module{typeorval=findtypes_and_values(TVlist)},
- asn1_db:dbnew(NewM#module.name, S#state.erule),
- asn1_db:dbput(NewM#module.name,'MODULE', NewM),
- Res = storeindb(#state{mname=NewM#module.name}, TVlist, []),
- include_default_class(S,NewM#module.name),
+storeindb(S0, #module{name=ModName,typeorval=TVlist0}=M) ->
+ S = S0#state{mname=ModName},
+ TVlist1 = [{asn1ct:get_name_of_def(Def),Def} || Def <- TVlist0],
+ case check_duplicate_defs(S, TVlist1) of
+ ok ->
+ storeindb_1(S, M, TVlist0, TVlist1);
+ {error,_}=Error ->
+ Error
+ end.
+
+storeindb_1(S, #module{name=ModName}=M, TVlist0, TVlist) ->
+ NewM = M#module{typeorval=findtypes_and_values(TVlist0)},
+ asn1_db:dbnew(ModName, S#state.erule),
+ asn1_db:dbput(ModName, 'MODULE', NewM),
+ asn1_db:dbput(ModName, TVlist),
+ include_default_class(S, NewM#module.name),
include_default_type(NewM#module.name),
- Res.
+ ok.
-storeindb(#state{mname=Module}=S, [H|T], Errors) ->
- Name = asn1ct:get_name_of_def(H),
- case asn1_db:dbget(Module, Name) of
- undefined ->
- asn1_db:dbput(Module, Name, H),
- storeindb(S, T, Errors);
- Prev ->
- PrevLine = asn1ct:get_pos_of_def(Prev),
- Error = return_asn1_error(S, H, {already_defined,Name,PrevLine}),
- storeindb(S, T, [Error|Errors])
- end;
-storeindb(_, [], []) ->
- ok;
-storeindb(_, [], [_|_]=Errors) ->
- {error,Errors}.
+check_duplicate_defs(S, Defs) ->
+ Set0 = sofs:relation(Defs),
+ Set1 = sofs:relation_to_family(Set0),
+ Set = sofs:to_external(Set1),
+ case [duplicate_def(S, N, Dup) || {N,[_,_|_]=Dup} <- Set] of
+ [] ->
+ ok;
+ [_|_]=E ->
+ {error,lists:append(E)}
+ end.
+
+duplicate_def(S, Name, Dups0) ->
+ Dups1 = [{asn1ct:get_pos_of_def(Def),Def} || Def <- Dups0],
+ [{Prev,_}|Dups] = lists:sort(Dups1),
+ duplicate_def_1(S, Dups, Name, Prev).
+duplicate_def_1(S, [{_,Def}|T], Name, Prev) ->
+ E = return_asn1_error(S, Def, {already_defined,Name,Prev}),
+ [E|duplicate_def_1(S, T, Name, Prev)];
+duplicate_def_1(_, [], _, _) ->
+ [].
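+
+%% Hypothetical example: if 'T' is defined on lines 10 and 20, the sort
+%% on position keeps line 10 as Prev, and one error
+%% {already_defined,'T',10} is generated for the later definition.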
findtypes_and_values(TVList) ->
findtypes_and_values(TVList,[],[],[],[],[],[]).%% Types,Values,
@@ -6698,99 +5737,146 @@ findtypes_and_values([],Tacc,Vacc,Pacc,Cacc,Oacc,OSacc) ->
{lists:reverse(Tacc),lists:reverse(Vacc),lists:reverse(Pacc),
lists:reverse(Cacc),lists:reverse(Oacc),lists:reverse(OSacc)}.
+return_asn1_error(#state{error_context=Context}=S, Error) ->
+ return_asn1_error(S, Context, Error).
+
return_asn1_error(#state{mname=Where}, Item, Error) ->
Pos = asn1ct:get_pos_of_def(Item),
{structured_error,{Where,Pos},?MODULE,Error}.
-asn1_error(S, Item, Error) ->
- throw({error,return_asn1_error(S, Item, Error)}).
+asn1_error(S, Error) ->
+ throw({error,return_asn1_error(S, Error)}).
format_error({already_defined,Name,PrevLine}) ->
io_lib:format("the name ~p has already been defined at line ~p",
[Name,PrevLine]);
+format_error({duplicate_identifier,Ids}) ->
+ io_lib:format("the identifier '~p' has already been used", [Ids]);
+format_error({duplicate_tags,Elements}) ->
+ io_lib:format("duplicate tags in the elements: ~s",
+ [format_elements(Elements)]);
+format_error({enum_illegal_redefinition,Id}) ->
+ io_lib:format("'~s' must not be redefined", [Id]);
+format_error({enum_not_ascending,Id,N,Prev}) ->
+ io_lib:format("the values for enumerations which follow '...' must "
+ "be in ascending order, but '~p(~p)' is less than the "
+ "previous value '~p'", [Id,N,Prev]);
+format_error({enum_reused_value,Id,Val}) ->
+ io_lib:format("'~s' has the value '~p' which is used more than once",
+ [Id,Val]);
+format_error({illegal_id, Id}) ->
+ io_lib:format("illegal identifier: ~p", [Id]);
+format_error({illegal_choice_type, Ref}) ->
+ io_lib:format("expecting a CHOICE type: ~p", [Ref]);
+format_error({illegal_class_name,Class}) ->
+ io_lib:format("the class name '~s' is illegal (it must start with an uppercase letter and only contain uppercase letters, digits, or hyphens)", [Class]);
+format_error({illegal_COMPONENTS_OF, Ref}) ->
+ io_lib:format("expected a SEQUENCE or SET got: ~p", [Ref]);
+format_error(illegal_external_value) ->
+ "illegal value in EXTERNAL type";
format_error({illegal_instance_of,Class}) ->
io_lib:format("using INSTANCE OF on class '~s' is illegal, "
- "because INSTANCE OF may only be used on the class TYPE-IDENTFIER",
+ "because INSTANCE OF may only be used on the class TYPE-IDENTIFIER",
[Class]);
+format_error(illegal_integer_value) ->
+ "expecting an integer value";
+format_error(illegal_object) ->
+ "expecting an object";
+format_error({illegal_object_field, Id}) ->
+ io_lib:format("expecting a class field: ~p",[Id]);
+format_error({illegal_oid,o_id}) ->
+ "illegal OBJECT IDENTIFIER";
+format_error({illegal_oid,rel_oid}) ->
+ "illegal RELATIVE-OID";
format_error(illegal_octet_string_value) ->
"expecting a bstring or an hstring as value for an OCTET STRING";
format_error({illegal_typereference,Name}) ->
io_lib:format("'~p' is used as a typereference, but does not start with an uppercase letter", [Name]);
+format_error(illegal_table_constraint) ->
+ "table constraints may only be applied to CLASS.&field constructs";
+format_error(illegal_value) ->
+ "expecting a value";
+format_error({illegal_value, TYPE}) ->
+ io_lib:format("expecting a ~s value", [TYPE]);
format_error({invalid_fields,Fields,Obj}) ->
io_lib:format("invalid ~s in ~p", [format_fields(Fields),Obj]);
format_error({invalid_bit_number,Bit}) ->
io_lib:format("the bit number '~p' is invalid", [Bit]);
+format_error(invalid_table_constraint) ->
+ "the table constraint is not an object set";
+format_error(invalid_objectset) ->
+ "expecting an object set";
+format_error({implicit_tag_before,Kind}) ->
+ "illegal implicit tag before " ++
+ case Kind of
+ choice -> "'CHOICE'";
+ open_type -> "open type"
+ end;
format_error({missing_mandatory_fields,Fields,Obj}) ->
io_lib:format("missing mandatory ~s in ~p",
[format_fields(Fields),Obj]);
+format_error({missing_table_constraint,Component}) ->
+ io_lib:format("the component '~s' is referenced by a component relation constraint using the '@field-name' notation, but does not have a table constraint",
+ [Component]);
+format_error({missing_id,Id}) ->
+ io_lib:format("expected the mandatory component '~p'", [Id]);
+format_error({missing_ocft,Component}) ->
+ io_lib:format("the component '~s' must be an ObjectClassFieldType (CLASSNAME.&field-name)", [Component]);
+format_error(multiple_uniqs) ->
+ "implementation limitation: only one UNIQUE field is allowed in CLASS";
format_error({namelist_redefinition,Name}) ->
io_lib:format("the name '~s' can not be redefined", [Name]);
+format_error({param_bad_type, Ref}) ->
+ io_lib:format("'~p' is not a parameterized type", [Ref]);
+format_error(param_wrong_number_of_arguments) ->
+ "wrong number of arguments";
+format_error(reversed_range) ->
+ "ranges must be given in increasing order";
+format_error({syntax_duplicated_fields,Fields}) ->
+ io_lib:format("~s must only occur once in the syntax list",
+ [format_fields(Fields)]);
+format_error(syntax_nomatch) ->
+ "unexpected end of object definition";
+format_error({syntax_mandatory_in_optional_group,Name}) ->
+ io_lib:format("the field '&~s' must not be within an optional group since it is not optional",
+ [Name]);
+format_error({syntax_missing_mandatory_fields,Fields}) ->
+ io_lib:format("missing mandatory ~s in the syntax list",
+ [format_fields(Fields)]);
+format_error({syntax_nomatch,Actual}) ->
+ io_lib:format("~s is not the next item allowed according to the defined syntax",
+ [Actual]);
+format_error({syntax_undefined_field,Field}) ->
+ io_lib:format("'&~s' is not a field of the class being defined",
+ [Field]);
format_error({undefined,Name}) ->
io_lib:format("'~s' is referenced, but is not defined", [Name]);
+format_error({undefined_export,Ref}) ->
+ io_lib:format("'~s' is exported but is not defined", [Ref]);
+format_error({undefined_field,FieldName}) ->
+ io_lib:format("the field '&~s' is undefined", [FieldName]);
format_error({undefined_import,Ref,Module}) ->
io_lib:format("'~s' is not exported from ~s", [Ref,Module]);
+format_error({unique_and_default,Field}) ->
+ io_lib:format("the field '&~s' must not have both 'UNIQUE' and 'DEFAULT'",
+ [Field]);
format_error({value_reused,Val}) ->
io_lib:format("the value '~p' is used more than once", [Val]);
+format_error({non_unique_object,Id}) ->
+ io_lib:format("object set with a UNIQUE field value of '~p' is used more than once", [Id]);
format_error(Other) ->
io_lib:format("~p", [Other]).
format_fields([F]) ->
- io_lib:format("field &~s", [F]);
+ io_lib:format("field '&~s'", [F]);
format_fields([H|T]) ->
- [io_lib:format("fields &~s", [H])|
- [io_lib:format(", &~s", [F]) || F <- T]].
-
-error({_,{structured_error,_,_,_}=SE,_}) ->
- SE;
-error({export,Msg,#state{mname=Mname,type=Ref,tname=Typename}}) ->
- Pos = Ref#'Externaltypereference'.pos,
- io:format("asn1error:~p:~p:~p~n~p~n",[Pos,Mname,Typename,Msg]),
- {error,{export,Pos,Mname,Typename,Msg}};
-% error({type,{Msg1,Msg2},#state{mname=Mname,type=Type,tname=Typename}})
-% when is_record(Type,typedef) ->
-% io:format("asn1error:~p:~p:~p ~p~n",
-% [Type#typedef.pos,Mname,Typename,Msg1]),
-% {error,{type,Type#typedef.pos,Mname,Typename,Msg1,Msg2}};
-error({type,Msg,#state{mname=Mname,type=Type,tname=Typename}})
- when is_record(Type,type) ->
- io:format("asn1error:~p:~p~n~p~n",
- [Mname,Typename,Msg]),
- {error,{type,Mname,Typename,Msg}};
-error({type,Msg,#state{mname=Mname,type=Type,tname=Typename}})
- when is_record(Type,typedef) ->
- io:format("asn1error:~p:~p:~p~n~p~n",
- [Type#typedef.pos,Mname,Typename,Msg]),
- {error,{type,Type#typedef.pos,Mname,Typename,Msg}};
-error({type,Msg,#state{mname=Mname,type=Type,tname=Typename}})
- when is_record(Type,ptypedef) ->
- io:format("asn1error:~p:~p:~p~n~p~n",
- [Type#ptypedef.pos,Mname,Typename,Msg]),
- {error,{type,Type#ptypedef.pos,Mname,Typename,Msg}};
-error({type,Msg,#state{mname=Mname,value=Value,vname=Valuename}})
- when is_record(Value,valuedef) ->
- io:format("asn1error:~p:~p:~p~n~p~n",[Value#valuedef.pos,Mname,Valuename,Msg]),
- {error,{type,Value#valuedef.pos,Mname,Valuename,Msg}};
-error({type,Msg,#state{mname=Mname,type=Type,tname=Typename}})
- when is_record(Type,pobjectdef) ->
- io:format("asn1error:~p:~p:~p~n~p~n",
- [Type#pobjectdef.pos,Mname,Typename,Msg]),
- {error,{type,Type#pobjectdef.pos,Mname,Typename,Msg}};
-error({value,Msg,#state{mname=Mname,value=Value,vname=Valuename}})
- when is_record(Value,valuedef) ->
- io:format("asn1error:~p:~p:~p~n~p~n",[Value#valuedef.pos,Mname,Valuename,Msg]),
- {error,{value,Value#valuedef.pos,Mname,Valuename,Msg}};
-error({Other,Msg,#state{mname=Mname,value=#valuedef{pos=Pos},vname=Valuename}}) ->
- io:format("asn1error:~p:~p:~p~n~p~n",[Pos,Mname,Valuename,Msg]),
- {error,{Other,Pos,Mname,Valuename,Msg}};
-error({Other,Msg,#state{mname=Mname,type=#typedef{pos=Pos},tname=Typename}}) ->
- io:format("asn1error:~p:~p:~p~n~p~n",[Pos,Mname,Typename,Msg]),
- {error,{Other,Pos,Mname,Typename,Msg}};
-error({Other,Msg,#state{mname=Mname,type=#classdef{pos=Pos},tname=Typename}}) ->
- io:format("asn1error:~p:~p:~p~n~p~n",[Pos,Mname,Typename,Msg]),
- {error,{Other,Pos,Mname,Typename,Msg}};
-error({Other,Msg,#state{mname=Mname,type=Type,tname=Typename}}) ->
- io:format("asn1error:~p:~p:~p~n~p~n",[asn1ct:get_pos_of_def(Type),Mname,Typename,Msg]),
- {error,{Other,asn1ct:get_pos_of_def(Type),Mname,Typename,Msg}}.
+ [io_lib:format("fields '&~s'", [H])|
+ [io_lib:format(", '&~s'", [F]) || F <- T]].
+
+format_elements([H1,H2|T]) ->
+ [io_lib:format("~p, ", [H1])|format_elements([H2|T])];
+format_elements([H]) ->
+ io_lib:format("~p", [H]).
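+
+%% For instance (illustration only), format_elements([a,b,c]) renders
+%% as "a, b, c".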
include_default_type(Module) ->
NameAbsList = default_type_list(),
@@ -6953,62 +6039,62 @@ default_type_list() ->
].
-include_default_class(S,Module) ->
- NameAbsList = default_class_list(S),
- include_default_class1(Module,NameAbsList).
+include_default_class(S, Module) ->
+ _ = [include_default_class1(S, Module, ClassDef) ||
+ ClassDef <- default_class_list()],
+ ok.
-include_default_class1(_,[]) ->
- ok;
-include_default_class1(Module,[{Name,TS}|Rest]) ->
- case asn1_db:dbget(Module,Name) of
+include_default_class1(S, Module, {Name,Ts0}) ->
+ case asn1_db:dbget(Module, Name) of
undefined ->
- C = #classdef{checked=true,module=Module,name=Name,
- typespec=TS},
- asn1_db:dbput(Module,Name,C);
- _ -> ok
- end,
- include_default_class1(Module,Rest).
+ #objectclass{fields=Fields,
+ syntax={'WITH SYNTAX',Syntax0}} = Ts0,
+ Syntax = preprocess_syntax(S, Syntax0, Fields),
+ Ts = Ts0#objectclass{syntax={preprocessed_syntax,Syntax}},
+ C = #classdef{checked=true,module=Module,
+ name=Name,typespec=Ts},
+ asn1_db:dbput(Module, Name, C);
+ _ ->
+ ok
+ end.
-default_class_list(S) ->
+default_class_list() ->
[{'TYPE-IDENTIFIER',
- {objectclass,
- [{fixedtypevaluefield,
- id,
- #type{tag=?TAG_PRIMITIVE(?N_OBJECT_IDENTIFIER),
- def='OBJECT IDENTIFIER'},
- 'UNIQUE',
- 'MANDATORY'},
- {typefield,'Type','MANDATORY'}],
- {'WITH SYNTAX',
- [{typefieldreference,'Type'},
- 'IDENTIFIED',
- 'BY',
- {valuefieldreference,id}]}}},
+ #objectclass{fields=[{fixedtypevaluefield,
+ id,
+ #type{tag=[?TAG_PRIMITIVE(?N_OBJECT_IDENTIFIER)],
+ def='OBJECT IDENTIFIER'},
+ 'UNIQUE',
+ 'MANDATORY'},
+ {typefield,'Type','MANDATORY'}],
+ syntax={'WITH SYNTAX',
+ [{typefieldreference,'Type'},
+ 'IDENTIFIED',
+ 'BY',
+ {valuefieldreference,id}]}}},
{'ABSTRACT-SYNTAX',
- {objectclass,
- [{fixedtypevaluefield,
- id,
- #type{tag=?TAG_PRIMITIVE(?N_OBJECT_IDENTIFIER),
- def='OBJECT IDENTIFIER'},
- 'UNIQUE',
- 'MANDATORY'},
- {typefield,'Type','MANDATORY'},
- {fixedtypevaluefield,
- property,
- #type{tag=?TAG_PRIMITIVE(?N_BIT_STRING),
- def={'BIT STRING',[]}},
- undefined,
- {'DEFAULT',
- [0,1,0]}}],
- {'WITH SYNTAX',
- [{typefieldreference,'Type'},
- 'IDENTIFIED',
- 'BY',
- {valuefieldreference,id},
- ['HAS',
- 'PROPERTY',
- {valuefieldreference,property}]]}}}].
-
+ #objectclass{fields=[{fixedtypevaluefield,
+ id,
+ #type{tag=[?TAG_PRIMITIVE(?N_OBJECT_IDENTIFIER)],
+ def='OBJECT IDENTIFIER'},
+ 'UNIQUE',
+ 'MANDATORY'},
+ {typefield,'Type','MANDATORY'},
+ {fixedtypevaluefield,
+ property,
+ #type{tag=[?TAG_PRIMITIVE(?N_BIT_STRING)],
+ def={'BIT STRING',[]}},
+ undefined,
+ {'DEFAULT',
+ [0,1,0]}}],
+ syntax={'WITH SYNTAX',
+ [{typefieldreference,'Type'},
+ 'IDENTIFIED',
+ 'BY',
+ {valuefieldreference,id},
+ ['HAS',
+ 'PROPERTY',
+ {valuefieldreference,property}]]}}}].
new_reference_name(Name) ->
case get(asn1_reference) of
@@ -7037,8 +6123,9 @@ insert_once(S,Tab,Key) ->
skipped
end.
-check_fold(S, [H|T], Check) ->
- Type = asn1_db:dbget(S#state.mname, H),
+check_fold(S0, [H|T], Check) ->
+ Type = asn1_db:dbget(S0#state.mname, H),
+ S = S0#state{error_context=Type},
case Check(S, H, Type) of
ok ->
check_fold(S, T, Check);
@@ -7047,5 +6134,19 @@ check_fold(S, [H|T], Check) ->
end;
check_fold(_, [], Check) when is_function(Check, 3) -> [].
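+
+%% error_value/1 (below) reduces an arbitrary reference or type to a
+%% printable name for error reports. Hypothetical examples: integers
+%% and atoms are returned as is, #type{def='INTEGER'} gives 'INTEGER',
+%% and #'Externaltypereference'{type='T'} gives 'T' via name_of_def/1.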
+error_value(Value) when is_integer(Value) -> Value;
+error_value(Value) when is_atom(Value) -> Value;
+error_value(#type{def=Value}) when is_atom(Value) -> Value;
+error_value(#type{def=Value}) -> error_value(Value);
+error_value(RefOrType) ->
+ try name_of_def(RefOrType) of
+ Name -> Name
+ catch _:_ ->
+ case get_datastr_name(RefOrType) of
+ undefined -> RefOrType;
+ Name -> Name
+ end
+ end.
+
name_of_def(#'Externaltypereference'{type=N}) -> N;
name_of_def(#'Externalvaluereference'{value=N}) -> N.
diff --git a/lib/asn1/src/asn1ct_constructed_ber_bin_v2.erl b/lib/asn1/src/asn1ct_constructed_ber_bin_v2.erl
index 5fadd0495a..820d19b85c 100644
--- a/lib/asn1/src/asn1ct_constructed_ber_bin_v2.erl
+++ b/lib/asn1/src/asn1ct_constructed_ber_bin_v2.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2002-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2002-2014. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -234,7 +234,7 @@ gen_decode_sequence(Erules,Typename,D) when is_record(D,type) ->
asn1ct_name:new(rb),
emit([" {'",RecordName,"'}.",nl,nl]);
{LeadingAttrTerm,PostponedDecArgs} ->
- emit([com,nl,nl]),
+ emit([nl]),
case {LeadingAttrTerm,PostponedDecArgs} of
{[],[]} ->
ok;
@@ -413,7 +413,7 @@ gen_decode_set(Erules,Typename,D) when is_record(D,type) ->
%% return value as record
emit([" {'",RecordName,"'}.",nl]);
{LeadingAttrTerm,PostponedDecArgs} ->
- emit([com,nl,nl]),
+ emit([nl]),
case {LeadingAttrTerm,PostponedDecArgs} of
{[],[]} ->
ok;
@@ -617,18 +617,20 @@ gen_dec_sequence_call1(Erules,TopType,[#'ComponentType'{name=Cname,typespec=Type
{LA,PostponedDec} =
gen_dec_component(Erules,TopType,Cname,Tags,Type,Num,Prop,
Ext,DecObjInf),
+ emit([com,nl]),
case Rest of
[] ->
{LA ++ LeadingAttrAcc,PostponedDec ++ ArgsAcc};
_ ->
- emit([com,nl]),
asn1ct_name:new(bytes),
gen_dec_sequence_call1(Erules,TopType,Rest,Num+1,Ext,DecObjInf,
LA++LeadingAttrAcc,PostponedDec++ArgsAcc)
end;
gen_dec_sequence_call1(_Erules,_TopType,[],1,_,_,_,_) ->
- no_terms.
+ no_terms;
+gen_dec_sequence_call1(_, _, [], _Num, _, _, LA, PostponedDec) ->
+ {LA, PostponedDec}.
gen_dec_sequence_call2(_Erules,_TopType, {[], [], []}, _Ext,_DecObjInf) ->
no_terms;
@@ -643,7 +645,6 @@ gen_dec_sequence_call2(Erules,TopType,{Root1,EList,Root2},_Ext,DecObjInf) ->
%% TagList is the tags of Root2 elements from the first up to and
%% including the first mandatory element.
TagList = get_root2_taglist(Root2,[]),
- emit({com,nl}),
emit([{curr,tlv}," = ",
{call,ber,skip_ExtensionAdditions,
[{prev,tlv},{asis,TagList}]},com,nl]),
diff --git a/lib/asn1/src/asn1ct_constructed_per.erl b/lib/asn1/src/asn1ct_constructed_per.erl
index a91404ed54..0bc6688a49 100644
--- a/lib/asn1/src/asn1ct_constructed_per.erl
+++ b/lib/asn1/src/asn1ct_constructed_per.erl
@@ -410,12 +410,11 @@ gen_dec_open_type(Erule, Val, {Xmod,Xtype}, LeadingAttr,
#classdef{typespec=ClassDef} = asn1_db:dbget(ClMod, ClType),
#objectclass{fields=ClassFields} = ClassDef,
Extensible = lists:member('EXTENSIONMARK', ObjSet1),
- ObjSet2 = [{Key,fix_object_code(Name, Code, ClassFields)} ||
- {_,Key,Code} <- ObjSet1],
- ObjSet = lists:sort([P || {_,B}=P <- ObjSet2, B =/= none]),
+ Typename = [Name,ClType],
+ ObjSet = index_object_set(Erule, ClType, Name,
+ ObjSet1, ClassFields),
Key = erlang:md5(term_to_binary({decode,ObjSet,RestFieldNames,
Prop,Extensible})),
- Typename = [Name,ClType],
Gen = fun(_Fd, N) ->
dec_objset_optional(N, Prop),
dec_objset(Erule, N, ObjSet, RestFieldNames, Typename),
@@ -467,46 +466,15 @@ dec_objset_2(Erule, Obj, RestFields0, Typename) ->
Imm = asn1ct_gen_per:gen_dec_imm(Erule, Type),
{Term,_} = asn1ct_imm:dec_slim_cg(Imm, 'Bytes'),
emit([com,nl,Term]);
- #typedef{name={constructed,bif},typespec=Def} ->
- InnerType = asn1ct_gen:get_inner(Def#type.def),
- case InnerType of
- 'CHOICE' ->
- asn1ct_name:start(),
- asn1ct_name:new(bytes),
- {'CHOICE',CompList} = Def#type.def,
- Ext = extensible_enc(CompList),
- emit(["{Result,_} = begin",nl]),
- gen_dec_choice(Erule, Typename, CompList, Ext),
- emit([nl,
- "end",com,nl,
- "Result"]);
- 'SET' ->
- Imm0 = gen_dec_constructed_imm(Erule, Typename, Def),
- Imm = opt_imm(Imm0),
- asn1ct_name:start(),
- emit(["{Result,_} = begin",nl]),
- emit_gen_dec_imm(Imm),
- emit([nl,
- "end",com,nl,
- "Result"]);
- 'SET OF' ->
- asn1ct_name:start(),
- do_gen_decode_sof(Erule, Typename, 'SET OF',
- Def, false);
- 'SEQUENCE' ->
- Imm0 = gen_dec_constructed_imm(Erule, Typename, Def),
- Imm = opt_imm(Imm0),
- asn1ct_name:start(),
- emit(["{Result,_} = begin",nl]),
- emit_gen_dec_imm(Imm),
- emit([nl,
- "end",com,nl,
- "Result"]);
- 'SEQUENCE OF' ->
- asn1ct_name:start(),
- do_gen_decode_sof(Erule, Typename, 'SEQUENCE OF',
- Def, false)
- end;
+ #typedef{name={constructed,bif},typespec=Type}=Def ->
+ Prefix = "dec_outlined_",
+ Key = {dec_outlined,Def},
+ Gen = fun(_Fd, Name) ->
+ gen_dec_obj(Erule, Name, Typename, Type)
+ end,
+ Func = asn1ct_func:call_gen(Prefix, Key, Gen),
+ emit(["{Term,_} = ",{asis,Func},"(Bytes)",com,nl,
+ "Term"]);
#typedef{name=Type} ->
emit(["{Result,_} = ",{asis,enc_func("dec_", Type)},"(Bytes),",nl,
"Result"]);
@@ -531,6 +499,12 @@ dec_objset_2(Erule, Obj, RestFields0, Typename) ->
end
end.
+gen_dec_obj(Erules, Name, Typename, Type) ->
+ emit([{asis,Name},"(Bytes) ->",nl]),
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ asn1ct_gen:gen_decode_constructed(Erules, Typename,
+ InnerType, Type).
+
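+%% Note (informal): both the decode case above and the encode case in
+%% enc_obj/5 below outline constructed types into generated helpers;
+%% asn1ct_func:call_gen/3 is keyed on the typedef, so the same helper
+%% should be emitted only once per distinct type.
+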
gen_encode_choice(Erule, TopType, D) ->
asn1ct_name:start(),
Imm = gen_encode_choice_imm(Erule, TopType, D),
@@ -595,10 +569,10 @@ gen_encode_sof_imm(Erule, Typename, SeqOrSetOf, #type{}=D) ->
gen_decode_sof(Erules, Typename, SeqOrSetOf, #type{}=D) ->
asn1ct_name:start(),
- do_gen_decode_sof(Erules, Typename, SeqOrSetOf, D, true),
+ do_gen_decode_sof(Erules, Typename, SeqOrSetOf, D),
emit([".",nl,nl]).
-do_gen_decode_sof(Erules, Typename, SeqOrSetOf, D, NeedRest) ->
+do_gen_decode_sof(Erules, Typename, SeqOrSetOf, D) ->
{_SeqOrSetOf,ComponentType} = D#type.def,
SizeConstraint = asn1ct_imm:effective_constraint(bitstring,
D#type.constraint),
@@ -610,12 +584,11 @@ do_gen_decode_sof(Erules, Typename, SeqOrSetOf, D, NeedRest) ->
""
end,
{Num,Buf} = gen_decode_length(SizeConstraint, Erules),
- Key = erlang:md5(term_to_binary({Typename,SeqOrSetOf,
- ComponentType,NeedRest})),
+ Key = erlang:md5(term_to_binary({Typename,SeqOrSetOf,ComponentType})),
Gen = fun(_Fd, Name) ->
gen_decode_sof_components(Erules, Name,
Typename, SeqOrSetOf,
- ComponentType, NeedRest)
+ ComponentType)
end,
F = asn1ct_func:call_gen("dec_components", Key, Gen),
emit([",",nl,
@@ -629,7 +602,7 @@ gen_decode_length(Constraint, Erule) ->
Imm = asn1ct_imm:per_dec_length(Constraint, true, is_aligned(Erule)),
asn1ct_imm:dec_slim_cg(Imm, "Bytes").
-gen_decode_sof_components(Erule, Name, Typename, SeqOrSetOf, Cont, NeedRest) ->
+gen_decode_sof_components(Erule, Name, Typename, SeqOrSetOf, Cont) ->
{ObjFun,ObjFun_Var} =
case Cont#type.tablecinf of
[{objfun,_}|_R] ->
@@ -637,14 +610,8 @@ gen_decode_sof_components(Erule, Name, Typename, SeqOrSetOf, Cont, NeedRest) ->
_ ->
{"",""}
end,
- case NeedRest of
- false ->
- emit([{asis,Name},"(0, _Bytes",ObjFun_Var,", Acc) ->",nl,
- "lists:reverse(Acc);",nl]);
- true ->
- emit([{asis,Name},"(0, Bytes",ObjFun_Var,", Acc) ->",nl,
- "{lists:reverse(Acc),Bytes};",nl])
- end,
+ emit([{asis,Name},"(0, Bytes",ObjFun_Var,", Acc) ->",nl,
+ "{lists:reverse(Acc),Bytes};",nl]),
emit([{asis,Name},"(Num, Bytes",ObjFun,", Acc) ->",nl,
"{Term,Remain} = "]),
Constructed_Suffix = asn1ct_gen:constructed_suffix(SeqOrSetOf,
@@ -1024,11 +991,12 @@ enc_var_type_call(Erule, Name, RestFieldNames,
#classdef{typespec=ClassDef} = asn1_db:dbget(ClMod, ClType),
#objectclass{fields=ClassFields} = ClassDef,
Extensible = lists:member('EXTENSIONMARK', ObjSet1),
- ObjSet2 = [{Key,fix_object_code(Name, Code, ClassFields)} ||
- {_,Key,Code} <- ObjSet1],
- ObjSet = lists:sort([P || {_,B}=P <- ObjSet2, B =/= none]),
+ ObjSet = index_object_set(Erule, ClType, Name,
+ ObjSet1, ClassFields),
Key = erlang:md5(term_to_binary({encode,ObjSet,RestFieldNames,Extensible})),
- Imm = enc_objset_imm(Erule, Name, ObjSet, RestFieldNames, Extensible),
+ TypeName = [ClType,Name],
+ Imm = enc_objset_imm(Erule, TypeName, Name, ObjSet,
+ RestFieldNames, Extensible),
Lambda = {lambda,[{var,"Val"},{var,"Id"}],Imm},
Gen = fun(_Fd, N) ->
Aligned = is_aligned(Erule),
@@ -1039,11 +1007,27 @@ enc_var_type_call(Erule, Name, RestFieldNames,
Prefix = lists:concat(["enc_os_",Name]),
[{call_gen,Prefix,Key,Gen,Lambda,[Val,Fun]}].
-fix_object_code(Name, [{Name,B}|_], _ClassFields) ->
- B;
-fix_object_code(Name, [_|T], ClassFields) ->
- fix_object_code(Name, T, ClassFields);
-fix_object_code(Name, [], ClassFields) ->
+index_object_set(_Erules, _ClType, Name, Set0, ClassFields) ->
+ Set = index_object_set_1(Name, Set0, ClassFields),
+ lists:sort(Set).
+
+index_object_set_1(Name, [{_,Key,Code}|T], ClassFields) ->
+ case index_object_set_2(Name, Code, ClassFields) of
+ none ->
+ index_object_set_1(Name, T, ClassFields);
+ Type ->
+ [{Key,Type}|index_object_set_1(Name, T, ClassFields)]
+ end;
+index_object_set_1(Name, [_|T], ClassFields) ->
+ index_object_set_1(Name, T, ClassFields);
+index_object_set_1(_, [], _) ->
+ [].
+
+index_object_set_2(Name, [{Name,Type}|_], _ClassFields) ->
+ Type;
+index_object_set_2(Name, [_|T], ClassFields) ->
+ index_object_set_2(Name, T, ClassFields);
+index_object_set_2(Name, [], ClassFields) ->
case lists:keyfind(Name, 2, ClassFields) of
{typefield,Name,'OPTIONAL'} ->
none;
@@ -1059,7 +1043,8 @@ fix_object_code(Name, [], ClassFields) ->
end
end.
-enc_objset_imm(Erule, Component, ObjSet, RestFieldNames, Extensible) ->
+enc_objset_imm(Erule, TypeName, Component, ObjSet,
+ RestFieldNames, Extensible) ->
Aligned = is_aligned(Erule),
E = {error,
fun() ->
@@ -1070,7 +1055,7 @@ enc_objset_imm(Erule, Component, ObjSet, RestFieldNames, Extensible) ->
end},
[{'cond',
[[{eq,{var,"Id"},Key}|
- enc_obj(Erule, Obj, RestFieldNames, Aligned)] ||
+ enc_obj(Erule, Obj, TypeName, RestFieldNames, Aligned)] ||
{Key,Obj} <- ObjSet] ++
[['_',case Extensible of
false ->
@@ -1086,24 +1071,18 @@ enc_objset_imm(Erule, Component, ObjSet, RestFieldNames, Extensible) ->
end
end]]}].
-enc_obj(Erule, Obj, RestFieldNames0, Aligned) ->
+enc_obj(Erule, Obj, TypeName, RestFieldNames0, Aligned) ->
+ Val = {var,"Val"},
case Obj of
+ #typedef{name={constructed,bif},typespec=Type}=Def ->
+ Prefix = "enc_outlined_",
+ Key = {enc_outlined,Def},
+ Gen = fun(_Fd, Name) ->
+ gen_enc_obj(Erule, Name, TypeName, Type)
+ end,
+ [{call_gen,Prefix,Key,Gen,undefined,[Val]}];
#typedef{name={primitive,bif},typespec=Def} ->
asn1ct_gen_per:gen_encode_prim_imm({var,"Val"}, Def, Aligned);
- #typedef{name={constructed,bif},typespec=Def} ->
- InnerType = asn1ct_gen:get_inner(Def#type.def),
- case InnerType of
- 'CHOICE' ->
- gen_encode_choice_imm(Erule, name, Def);
- 'SET' ->
- gen_encode_constructed_imm(Erule, name, Def);
- 'SET OF' ->
- gen_encode_sof_imm(Erule, name, InnerType, Def);
- 'SEQUENCE' ->
- gen_encode_constructed_imm(Erule, name, Def);
- 'SEQUENCE OF' ->
- gen_encode_sof_imm(Erule, name, InnerType, Def)
- end;
#typedef{name=Type} ->
[{apply,{local,enc_func(Type),Type},[{var,"Val"}]}];
#'Externalvaluereference'{module=Mod,value=Value} ->
@@ -1112,7 +1091,8 @@ enc_obj(Erule, Obj, RestFieldNames0, Aligned) ->
{object,_,Fields} = Def,
[NextField|RestFieldNames] = RestFieldNames0,
{NextField,Typedef} = lists:keyfind(NextField, 1, Fields),
- enc_obj(Erule, Typedef, RestFieldNames, Aligned)
+ enc_obj(Erule, Typedef, TypeName,
+ RestFieldNames, Aligned)
end;
#'Externaltypereference'{module=Mod,type=Type} ->
Func = enc_func(Type),
@@ -1124,6 +1104,11 @@ enc_obj(Erule, Obj, RestFieldNames0, Aligned) ->
end
end.
+gen_enc_obj(Erules, Name, Typename, Type) ->
+ emit([{asis,Name},"(Val) ->",nl]),
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ asn1ct_gen:gen_encode_constructed(Erules, Typename,
+ InnerType, Type).
gen_dec_components_call(Erule, TopType, {Root,ExtList},
DecInfObj, Ext, NumberOfOptionals) ->
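
In the hunks above, enc_objset_imm/6 and enc_obj/5 now carry the enclosing type name, and an object whose field type is an inline constructed type is no longer expanded in place: enc_obj/5 emits a {call_gen,"enc_outlined_",Key,Gen,...} item, and gen_enc_obj/4 later writes a separate named function whose body comes from asn1ct_gen:gen_encode_constructed/4. The generated open-type dispatch therefore calls small named encoders instead of containing them. A sketch of that shape, with hypothetical names and deliberately simplified bodies and error term:

    -module(enc_objset_sketch).
    -export([enc_os/2]).

    %% One branch per object key; each branch calls a separate "outlined"
    %% encoder rather than carrying the whole constructed encoder inline.
    enc_os(Id, Val) ->
        case Id of
            1 -> enc_outlined_a(Val);
            2 -> enc_outlined_b(Val);
            _ -> exit({error, {asn1, {illegal_object_identifier, Id}}})
        end.

    %% Stand-ins for the generated per-object encoders.
    enc_outlined_a(Val) -> {a, Val}.
    enc_outlined_b(Val) -> {b, Val}.
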
diff --git a/lib/asn1/src/asn1ct_gen.erl b/lib/asn1/src/asn1ct_gen.erl
index 450d309688..d3c1f34821 100644
--- a/lib/asn1/src/asn1ct_gen.erl
+++ b/lib/asn1/src/asn1ct_gen.erl
@@ -531,34 +531,30 @@ gen_part_decode_funcs({primitive,bif},_TypeName,
gen_part_decode_funcs(WhatKind,_TypeName,{_,Directive,_,_}) ->
throw({error,{asn1,{"Not implemented yet",WhatKind," partial incomplete directive:",Directive}}}).
-
-gen_types(Erules,Tname,{RootL1,ExtList,RootL2})
+%% EncDec = 'gen_encode' | 'gen_decode'
+gen_types(Erules, Tname, {RootL1,ExtList,RootL2}, EncDec)
when is_list(RootL1), is_list(RootL2) ->
- gen_types(Erules,Tname,RootL1),
- Rtmod = ct_gen_module(Erules),
- gen_types(Erules,Tname,Rtmod:extaddgroup2sequence(ExtList)),
- gen_types(Erules,Tname,RootL2);
-gen_types(Erules,Tname,{RootList,ExtList}) when is_list(RootList) ->
- gen_types(Erules,Tname,RootList),
+ gen_types(Erules, Tname, RootL1, EncDec),
Rtmod = ct_gen_module(Erules),
- gen_types(Erules,Tname,Rtmod:extaddgroup2sequence(ExtList));
-gen_types(Erules,Tname,[{'EXTENSIONMARK',_,_}|Rest]) ->
- gen_types(Erules,Tname,Rest);
-gen_types(Erules,Tname,[ComponentType|Rest]) ->
+ gen_types(Erules, Tname, Rtmod:extaddgroup2sequence(ExtList), EncDec),
+ gen_types(Erules, Tname, RootL2, EncDec);
+gen_types(Erules, Tname, {RootList,ExtList}, EncDec) when is_list(RootList) ->
+ gen_types(Erules, Tname, RootList, EncDec),
Rtmod = ct_gen_module(Erules),
+ gen_types(Erules, Tname, Rtmod:extaddgroup2sequence(ExtList), EncDec);
+gen_types(Erules, Tname, [{'EXTENSIONMARK',_,_}|T], EncDec) ->
+ gen_types(Erules, Tname, T, EncDec);
+gen_types(Erules, Tname, [ComponentType|T], EncDec) ->
asn1ct_name:clear(),
- Rtmod:gen_encode(Erules,Tname,ComponentType),
- asn1ct_name:clear(),
- Rtmod:gen_decode(Erules,Tname,ComponentType),
- gen_types(Erules,Tname,Rest);
-gen_types(_,_,[]) ->
- true;
-gen_types(Erules,Tname,Type) when is_record(Type,type) ->
Rtmod = ct_gen_module(Erules),
+ Rtmod:EncDec(Erules, Tname, ComponentType),
+ gen_types(Erules, Tname, T, EncDec);
+gen_types(_, _, [], _) ->
+ ok;
+gen_types(Erules, Tname, #type{}=Type, EncDec) ->
asn1ct_name:clear(),
- Rtmod:gen_encode(Erules,Tname,Type),
- asn1ct_name:clear(),
- Rtmod:gen_decode(Erules,Tname,Type).
+ Rtmod = ct_gen_module(Erules),
+ Rtmod:EncDec(Erules, Tname, Type).
%% VARIOUS GENERATOR STUFF
%% *************************************************
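
gen_types/3 becomes gen_types/4: instead of unconditionally generating both an encoder and a decoder for every component, it takes the pass to run (gen_encode or gen_decode, per the comment above) and applies it dynamically as Rtmod:EncDec(Erules, Tname, Type) on the selected back-end module. A minimal, self-contained sketch of that dispatch pattern (hypothetical module, not from the patch):

    -module(encdec_dispatch_sketch).
    -export([run/2, encode/1, decode/1]).

    %% Select encode/1 or decode/1 by the atom passed in, mirroring the
    %% Rtmod:EncDec(Erules, Tname, Type) call above.
    run(EncDec, Data) when EncDec =:= encode; EncDec =:= decode ->
        ?MODULE:EncDec(Data).

    encode(Data) -> {encoded, Data}.
    decode(Data) -> {decoded, Data}.
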
@@ -599,25 +595,25 @@ gen_encode_constructed(Erules,Typename,InnerType,D) when is_record(D,type) ->
'SET' ->
Rtmod:gen_encode_set(Erules,Typename,D),
#'SET'{components=Components} = D#type.def,
- gen_types(Erules,Typename,Components);
+ gen_types(Erules, Typename, Components, gen_encode);
'SEQUENCE' ->
Rtmod:gen_encode_sequence(Erules,Typename,D),
#'SEQUENCE'{components=Components} = D#type.def,
- gen_types(Erules,Typename,Components);
+ gen_types(Erules, Typename, Components, gen_encode);
'CHOICE' ->
Rtmod:gen_encode_choice(Erules,Typename,D),
{_,Components} = D#type.def,
- gen_types(Erules,Typename,Components);
+ gen_types(Erules, Typename, Components, gen_encode);
'SEQUENCE OF' ->
Rtmod:gen_encode_sof(Erules,Typename,InnerType,D),
{_,Type} = D#type.def,
NameSuffix = asn1ct_gen:constructed_suffix(InnerType,Type#type.def),
- gen_types(Erules,[NameSuffix|Typename],Type);
+ gen_types(Erules, [NameSuffix|Typename], Type, gen_encode);
'SET OF' ->
Rtmod:gen_encode_sof(Erules,Typename,InnerType,D),
{_,Type} = D#type.def,
NameSuffix = asn1ct_gen:constructed_suffix(InnerType,Type#type.def),
- gen_types(Erules,[NameSuffix|Typename],Type);
+ gen_types(Erules, [NameSuffix|Typename], Type, gen_encode);
_ ->
exit({nyi,InnerType})
end;
@@ -630,20 +626,29 @@ gen_decode_constructed(Erules,Typename,InnerType,D) when is_record(D,type) ->
asn1ct:step_in_constructed(), %% updates namelist for exclusive decode
case InnerType of
'SET' ->
- Rtmod:gen_decode_set(Erules,Typename,D);
+ Rtmod:gen_decode_set(Erules,Typename,D),
+ #'SET'{components=Components} = D#type.def,
+ gen_types(Erules, Typename, Components, gen_decode);
'SEQUENCE' ->
- Rtmod:gen_decode_sequence(Erules,Typename,D);
+ Rtmod:gen_decode_sequence(Erules,Typename,D),
+ #'SEQUENCE'{components=Components} = D#type.def,
+ gen_types(Erules, Typename, Components, gen_decode);
'CHOICE' ->
- Rtmod:gen_decode_choice(Erules,Typename,D);
+ Rtmod:gen_decode_choice(Erules,Typename,D),
+ {_,Components} = D#type.def,
+ gen_types(Erules, Typename, Components, gen_decode);
'SEQUENCE OF' ->
- Rtmod:gen_decode_sof(Erules,Typename,InnerType,D);
+ Rtmod:gen_decode_sof(Erules,Typename,InnerType,D),
+ {_,#type{def=Def}=Type} = D#type.def,
+ NameSuffix = asn1ct_gen:constructed_suffix(InnerType, Def),
+ gen_types(Erules, [NameSuffix|Typename], Type, gen_decode);
'SET OF' ->
- Rtmod:gen_decode_sof(Erules,Typename,InnerType,D);
- _ ->
- exit({nyi,InnerType})
+ Rtmod:gen_decode_sof(Erules,Typename,InnerType,D),
+ {_,#type{def=Def}=Type} = D#type.def,
+ NameSuffix = asn1ct_gen:constructed_suffix(InnerType, Def),
+ gen_types(Erules, [NameSuffix|Typename], Type, gen_decode)
end;
-
gen_decode_constructed(Erules,Typename,InnerType,D) when is_record(D,typedef) ->
gen_decode_constructed(Erules,Typename,InnerType,D#typedef.typespec).
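
With gen_types/4 in place, gen_decode_constructed now mirrors the encode side: after emitting the decoder for a SET, SEQUENCE, CHOICE, SEQUENCE OF or SET OF it recurses into the components with gen_types(..., gen_decode), so every inner constructed type gets its own decode function and the old catch-all exit({nyi,InnerType}) clause disappears. A tiny sketch of the recursion shape over a nested type (hypothetical, greatly simplified representation):

    -module(gen_recurse_sketch).
    -export([gen/2]).

    %% gen(NamePath, TypeDef) -> one {decode_fun,...} marker per type for
    %% which a decode function would be generated.
    gen(Name, {'SEQUENCE', Components}) ->
        [{decode_fun, Name} |
         lists:append([gen([CName|Name], CType) || {CName, CType} <- Components])];
    gen(Name, {'SEQUENCE OF', Inner}) ->
        [{decode_fun, Name} | gen(['SOF'|Name], Inner)];
    gen(Name, Primitive) when is_atom(Primitive) ->
        [{decode_fun, Name, Primitive}].

For example, gen(['T'], {'SEQUENCE', [{a,'INTEGER'}, {b,{'SEQUENCE OF','BOOLEAN'}}]}) yields markers for 'T', its component a, its component b, and the inner type of b.
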
diff --git a/lib/asn1/src/asn1ct_gen_ber_bin_v2.erl b/lib/asn1/src/asn1ct_gen_ber_bin_v2.erl
index e51b0898be..37413298a7 100644
--- a/lib/asn1/src/asn1ct_gen_ber_bin_v2.erl
+++ b/lib/asn1/src/asn1ct_gen_ber_bin_v2.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2002-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2002-2014. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -24,7 +24,7 @@
-include("asn1_records.hrl").
--export([decode_class/1, decode_type/1]).
+-export([decode_class/1]).
-export([gen_encode/2,gen_encode/3,gen_decode/2,gen_decode/3]).
-export([gen_encode_prim/4]).
-export([gen_dec_prim/3]).
@@ -278,8 +278,7 @@ emit_enc_enumerated_cases(L, Tags) ->
emit_enc_enumerated_cases(L, Tags, noext).
emit_enc_enumerated_cases([{EnumName,EnumVal}|T], Tags, Ext) ->
- Bytes = encode_pos_integer(EnumVal, []),
- Len = length(Bytes),
+ {Bytes,Len} = encode_integer(EnumVal),
emit([{asis,EnumName}," -> ",
{call,ber,encode_tags,[Tags,{asis,Bytes},Len]},";",nl]),
emit_enc_enumerated_cases(T, Tags, Ext);
@@ -288,10 +287,25 @@ emit_enc_enumerated_cases([], _Tags, _Ext) ->
emit([{curr,enumval}," -> exit({error,{asn1, {enumerated_not_in_range,",{curr, enumval},"}}})"]),
emit([nl,"end"]).
-encode_pos_integer(0, [B|_Acc] = L) when B < 128 ->
+encode_integer(Val) ->
+ Bytes =
+ if
+ Val >= 0 ->
+ encode_integer_pos(Val, []);
+ true ->
+ encode_integer_neg(Val, [])
+ end,
+ {Bytes,length(Bytes)}.
+
+encode_integer_pos(0, [B|_Acc]=L) when B < 128 ->
L;
-encode_pos_integer(N, Acc) ->
- encode_pos_integer(N bsr 8, [N band 255|Acc]).
+encode_integer_pos(N, Acc) ->
+ encode_integer_pos((N bsr 8), [N band 16#ff| Acc]).
+
+encode_integer_neg(-1, [B1|_T]=L) when B1 > 127 ->
+ L;
+encode_integer_neg(N, Acc) ->
+ encode_integer_neg(N bsr 8, [N band 16#ff|Acc]).
%%===============================================================================
%%===============================================================================
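
encode_pos_integer/2 could only produce the octets of a non-negative value; the replacement encode_integer/1 also accumulates base-256 digits for negative values, stopping when the remaining value is -1 and the leading octet has its sign bit set, which gives the shortest two's-complement encoding BER requires. Since emit_enc_enumerated_cases/3 now uses it, negative ENUMERATED values are handled as well. The same helpers, lifted into a stand-alone sketch that returns only the octet list (the real function also returns the length):

    -module(ber_int_sketch).
    -export([encode_integer/1]).

    %% Minimal two's-complement octets for any integer.
    encode_integer(Val) when Val >= 0 -> pos(Val, []);
    encode_integer(Val)               -> neg(Val, []).

    pos(0, [B|_]=L) when B < 128 -> L;
    pos(N, Acc) -> pos(N bsr 8, [N band 16#ff | Acc]).

    neg(-1, [B|_]=L) when B > 127 -> L;
    neg(N, Acc) -> neg(N bsr 8, [N band 16#ff | Acc]).

For instance, encode_integer(128) returns [0,128] (a leading zero octet keeps the value positive), encode_integer(-1) returns [255], and encode_integer(-129) returns [255,127].
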
@@ -1179,23 +1193,25 @@ gen_objset_enc(_,_,{unique,undefined},_,_,_,_,_) ->
gen_objset_enc(Erules, ObjSetName, UniqueName,
[{ObjName,Val,Fields}|T], ClName, ClFields,
NthObj,Acc)->
- emit(["'getenc_",ObjSetName,"'(",{asis,Val},") ->",nl]),
CurrMod = get(currmod),
{InternalFunc,NewNthObj}=
case ObjName of
{no_mod,no_name} ->
- gen_inlined_enc_funs(Fields,ClFields,ObjSetName,NthObj);
+ gen_inlined_enc_funs(Fields, ClFields, ObjSetName, Val, NthObj);
{CurrMod,Name} ->
- emit({" fun 'enc_",Name,"'/3"}),
+ emit(["'getenc_",ObjSetName,"'(",{asis,Val},") ->",nl,
+ " fun 'enc_",Name,"'/3;",nl]),
{[],NthObj};
{ModuleName,Name} ->
+ emit(["'getenc_",ObjSetName,"'(",{asis,Val},") ->",nl]),
emit_ext_fun(enc,ModuleName,Name),
+ emit([";",nl]),
{[],NthObj};
_ ->
- emit({" fun 'enc_",ObjName,"'/3"}),
+ emit(["'getenc_",ObjSetName,"'(",{asis,Val},") ->",nl,
+ " fun 'enc_",ObjName,"'/3;",nl]),
{[],NthObj}
end,
- emit({";",nl}),
gen_objset_enc(Erules, ObjSetName, UniqueName, T, ClName, ClFields,
NewNthObj, InternalFunc ++ Acc);
%% See X.681 Annex E for the following case
@@ -1223,13 +1239,14 @@ emit_default_getenc(ObjSetName,UniqueName) ->
%% gen_inlined_enc_funs for each object iterates over all fields of a
%% class, and for each typefield it checks if the object has that
%% field and emits the proper code.
-gen_inlined_enc_funs(Fields, [{typefield,_,_}|_]=T, ObjSetName, NthObj) ->
- emit([indent(3),"fun(Type, Val, _RestPrimFieldName) ->",nl,
+gen_inlined_enc_funs(Fields, [{typefield,_,_}|_]=T, ObjSetName, Val, NthObj) ->
+ emit(["'getenc_",ObjSetName,"'(",{asis,Val},") ->",nl,
+ indent(3),"fun(Type, Val, _RestPrimFieldName) ->",nl,
indent(6),"case Type of",nl]),
gen_inlined_enc_funs1(Fields, T, ObjSetName, [], NthObj, []);
-gen_inlined_enc_funs(Fields,[_|Rest],ObjSetName,NthObj) ->
- gen_inlined_enc_funs(Fields,Rest,ObjSetName,NthObj);
-gen_inlined_enc_funs(_,[],_,NthObj) ->
+gen_inlined_enc_funs(Fields, [_|Rest], ObjSetName, Val, NthObj) ->
+ gen_inlined_enc_funs(Fields, Rest, ObjSetName, Val, NthObj);
+gen_inlined_enc_funs(_, [], _, _, NthObj) ->
{[],NthObj}.
gen_inlined_enc_funs1(Fields, [{typefield,Name,_}|Rest], ObjSetName,
@@ -1276,7 +1293,7 @@ gen_inlined_enc_funs1(Fields,[_|Rest], ObjSetName, Sep, NthObj, Acc)->
gen_inlined_enc_funs1(Fields, Rest, ObjSetName, Sep, NthObj, Acc);
gen_inlined_enc_funs1(_, [], _, _, NthObj, Acc) ->
emit([nl,indent(6),"end",nl,
- indent(3),"end"]),
+ indent(3),"end;",nl]),
{Acc,NthObj}.
emit_enc_open_type(I) ->
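
The restructuring above moves the emission of each 'getenc_<ObjectSet>'/1 clause head into the branch that actually produces a body, and the closing ";" is emitted from the same place; the getdec_ generators further down in this file are reorganised the same way. The practical effect is that a clause is only emitted for objects whose class has at least one type field. A sketch of the kind of Erlang the generator aims to emit (hypothetical object-set and object names, simplified fun bodies):

    -module(getenc_shape_sketch).
    -export(['getenc_MyObjSet'/1, enc_obj_a/3]).

    %% One clause per object, plus the default error clause that
    %% emit_default_getenc/2 appends.
    'getenc_MyObjSet'(1) ->
        fun enc_obj_a/3;
    'getenc_MyObjSet'(2) ->
        fun(Type, Val, _RestPrimFieldName) ->
                case Type of
                    'ArgumentType' -> {Val, 0}
                end
        end;
    'getenc_MyObjSet'(ErrV) ->
        fun(C, V, _) ->
                exit({{component,C},{value,V},{unique_name_and_value,id,ErrV}})
        end.

    %% Stand-in for a separately generated object encoder.
    enc_obj_a(_Type, Val, _RestPrimFieldName) -> {Val, 0}.
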
@@ -1358,23 +1375,25 @@ gen_objset_dec(_,_,{unique,undefined},_,_,_,_) ->
ok;
gen_objset_dec(Erules, ObjSName, UniqueName, [{ObjName,Val,Fields}|T],
ClName, ClFields, NthObj)->
- emit(["'getdec_",ObjSName,"'(",{asis,Val},") ->",nl]),
CurrMod = get(currmod),
NewNthObj=
case ObjName of
{no_mod,no_name} ->
- gen_inlined_dec_funs(Fields,ClFields,ObjSName,NthObj);
+ gen_inlined_dec_funs(Fields,ClFields,ObjSName,Val,NthObj);
{CurrMod,Name} ->
- emit([" fun 'dec_",Name,"'/3"]),
+ emit(["'getdec_",ObjSName,"'(",{asis,Val},") ->",nl,
+ " fun 'dec_",Name,"'/3;", nl]),
NthObj;
{ModuleName,Name} ->
+ emit(["'getdec_",ObjSName,"'(",{asis,Val},") ->",nl]),
emit_ext_fun(dec,ModuleName,Name),
+ emit([";",nl]),
NthObj;
_ ->
- emit([" fun 'dec_",ObjName,"'/3"]),
+ emit(["'getdec_",ObjSName,"'(",{asis,Val},") ->",nl,
+ " fun 'dec_",ObjName,"'/3;", nl]),
NthObj
end,
- emit([";",nl]),
gen_objset_dec(Erules, ObjSName, UniqueName, T, ClName,
ClFields, NewNthObj);
gen_objset_dec(_,ObjSetName,_UniqueName,['EXTENSIONMARK'],_ClName,
@@ -1394,10 +1413,15 @@ emit_default_getdec(ObjSetName,UniqueName) ->
emit(["'getdec_",ObjSetName,"'(ErrV) ->",nl]),
emit([indent(2), "fun(C,V,_) -> exit({{component,C},{value,V},{unique_name_and_value,",{asis,UniqueName},", ErrV}}) end"]).
-gen_inlined_dec_funs(Fields, ClFields, ObjSetName, NthObj) ->
+gen_inlined_dec_funs(Fields, [{typefield,_,_}|_]=ClFields, ObjSetName, Val, NthObj) ->
+ emit(["'getdec_",ObjSetName,"'(",{asis,Val},") ->",nl]),
emit([indent(3),"fun(Type, Bytes, _RestPrimFieldName) ->",nl,
indent(6),"case Type of",nl]),
- gen_inlined_dec_funs1(Fields, ClFields, ObjSetName, "", NthObj).
+ gen_inlined_dec_funs1(Fields, ClFields, ObjSetName, "", NthObj);
+gen_inlined_dec_funs(Fields, [_|ClFields], ObjSetName, Val, NthObj) ->
+ gen_inlined_dec_funs(Fields, ClFields, ObjSetName, Val, NthObj);
+gen_inlined_dec_funs(_, _, _, _,NthObj) ->
+ NthObj.
gen_inlined_dec_funs1(Fields, [{typefield,Name,Prop}|Rest],
ObjSetName, Sep0, NthObj) ->
@@ -1439,7 +1463,7 @@ gen_inlined_dec_funs1(Fields, [_|Rest], ObjSetName, Sep, NthObj)->
gen_inlined_dec_funs1(Fields, Rest, ObjSetName, Sep, NthObj);
gen_inlined_dec_funs1(_, [], _, _, NthObj) ->
emit([nl,indent(6),"end",nl,
- indent(3),"end"]),
+ indent(3),"end;",nl]),
NthObj.
emit_dec_open_type(I) ->
@@ -1534,39 +1558,6 @@ decode_class('CONTEXT') ->
decode_class('PRIVATE') ->
?PRIVATE.
-decode_type('BOOLEAN') -> 1;
-decode_type('INTEGER') -> 2;
-decode_type('BIT STRING') -> 3;
-decode_type('OCTET STRING') -> 4;
-decode_type('NULL') -> 5;
-decode_type('OBJECT IDENTIFIER') -> 6;
-decode_type('ObjectDescriptor') -> 7;
-decode_type('EXTERNAL') -> 8;
-decode_type('REAL') -> 9;
-decode_type('ENUMERATED') -> 10;
-decode_type('EMBEDDED_PDV') -> 11;
-decode_type('UTF8String') -> 12;
-decode_type('RELATIVE-OID') -> 13;
-decode_type('SEQUENCE') -> 16;
-decode_type('SEQUENCE OF') -> 16;
-decode_type('SET') -> 17;
-decode_type('SET OF') -> 17;
-decode_type('NumericString') -> 18;
-decode_type('PrintableString') -> 19;
-decode_type('TeletexString') -> 20;
-decode_type('T61String') -> 20;
-decode_type('VideotexString') -> 21;
-decode_type('IA5String') -> 22;
-decode_type('UTCTime') -> 23;
-decode_type('GeneralizedTime') -> 24;
-decode_type('GraphicString') -> 25;
-decode_type('VisibleString') -> 26;
-decode_type('GeneralString') -> 27;
-decode_type('UniversalString') -> 28;
-decode_type('BMPString') -> 30;
-decode_type('CHOICE') -> 'CHOICE'; % choice gets the tag from the actual alternative
-decode_type(Else) -> exit({error,{asn1,{unrecognized_type,Else}}}).
-
mkfuncname(#'Externaltypereference'{module=Mod,type=EType}, DecOrEnc) ->
CurrMod = get(currmod),
case CurrMod of
diff --git a/lib/asn1/src/asn1ct_imm.erl b/lib/asn1/src/asn1ct_imm.erl
index bdd14871d1..91820e08de 100644
--- a/lib/asn1/src/asn1ct_imm.erl
+++ b/lib/asn1/src/asn1ct_imm.erl
@@ -499,6 +499,8 @@ per_dec_enumerated_fix_list([], Tail, _) -> Tail.
per_dec_integer_1([{'SingleValue',Value}], _Aligned) ->
{value,Value};
+per_dec_integer_1([{'ValueRange',{'MIN',_}}], Aligned) ->
+ per_dec_unconstrained(Aligned);
per_dec_integer_1([{'ValueRange',{Lb,'MAX'}}], Aligned) when is_integer(Lb) ->
per_decode_semi_constrained(Lb, Aligned);
per_dec_integer_1([{'ValueRange',{Lb,Ub}}], Aligned) when is_integer(Lb),
@@ -1094,6 +1096,9 @@ per_enc_integer_1(Val0, [Constr], Aligned) ->
per_enc_integer_2(Val, {'SingleValue',Sv}, Aligned) when is_integer(Sv) ->
per_enc_constrained(Val, Sv, Sv, Aligned);
+per_enc_integer_2(Val, {'ValueRange',{'MIN',Ub}}, Aligned)
+ when is_integer(Ub) ->
+ {[],{lt,Val,Ub+1},per_enc_unconstrained(Val, Aligned)};
per_enc_integer_2(Val0, {'ValueRange',{Lb,'MAX'}}, Aligned)
when is_integer(Lb) ->
{Prefix,Val} = sub_lb(Val0, Lb),
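
The two hunks above teach the PER back-end about integer constraints whose lower bound is 'MIN': on decode a (MIN..Ub) range is read as an unconstrained integer, and on encode the value is range-checked against the upper bound ({lt,Val,Ub+1}) and then encoded unconstrained as well, since a range unbounded below offers no offset for constrained encoding. A small sketch of the resulting classification, under the simplifying assumption of a single 'ValueRange' constraint (hypothetical module):

    -module(per_range_sketch).
    -export([classify/1]).

    %% How a lone value-range constraint would be encoded (illustration only).
    classify({'ValueRange', {'MIN', _Ub}})                   -> unconstrained;
    classify({'ValueRange', {Lb, 'MAX'}}) when is_integer(Lb) -> {semi_constrained, Lb};
    classify({'ValueRange', {Lb, Ub}}) when is_integer(Lb),
                                            is_integer(Ub)    -> {constrained, Lb, Ub}.
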
@@ -1580,7 +1585,7 @@ do_combine_put_bits(_, _, _) ->
throw(impossible).
debit(Budget0, Alternatives) ->
- case Budget0 - log2(Alternatives) of
+ case Budget0 - math:log2(Alternatives) of
Budget when Budget > 0.0 ->
Budget;
_ ->
@@ -1593,8 +1598,6 @@ num_clauses([_|T], N) ->
num_clauses(T, N+1);
num_clauses([], N) -> N.
-log2(N) ->
- math:log(N) / math:log(2.0).
collect_put_bits(Imm) ->
lists:splitwith(fun({put_bits,V,_,_}) when is_integer(V) -> true;
diff --git a/lib/asn1/src/asn1ct_parser.yrl b/lib/asn1/src/asn1ct_parser.yrl
deleted file mode 100644
index 083162f191..0000000000
--- a/lib/asn1/src/asn1ct_parser.yrl
+++ /dev/null
@@ -1,1177 +0,0 @@
-%%<copyright>
-%% <year>1997-2008</year>
-%% <holder>Ericsson AB, All Rights Reserved</holder>
-%%</copyright>
-%%<legalnotice>
-%% The contents of this file are subject to the Erlang Public License,
-%% Version 1.1, (the "License"); you may not use this file except in
-%% compliance with the License. You should have received a copy of the
-%% Erlang Public License along with this software. If not, it can be
-%% retrieved online at http://www.erlang.org/.
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and limitations
-%% under the License.
-%%
-%% The Initial Developer of the Original Code is Ericsson AB.
-%%</legalnotice>
-%%
-Nonterminals
-ModuleDefinition ModuleIdentifier DefinitiveIdentifier DefinitiveObjIdComponentList
-DefinitiveObjIdComponent TagDefault ExtensionDefault
-ModuleBody Exports SymbolsExported Imports SymbolsImported
-SymbolsFromModuleList SymbolsFromModule GlobalModuleReference AssignedIdentifier SymbolList
-Symbol Reference AssignmentList Assignment
-ExtensionAndException
-ComponentTypeLists
-Externaltypereference Externalvaluereference DefinedType DefinedValue
-AbsoluteReference ItemSpec ItemId ComponentId TypeAssignment
-ValueAssignment
-% ValueSetTypeAssignment
-ValueSet
-Type BuiltinType NamedType ReferencedType
-Value ValueNotNull BuiltinValue ReferencedValue NamedValue
-% BooleanType
-BooleanValue IntegerType NamedNumberList NamedNumber SignedNumber
-% inlined IntegerValue
-EnumeratedType
-% inlined Enumerations
-Enumeration EnumerationItem
-% inlined EnumeratedValue
-% RealType
-RealValue NumericRealValue SpecialRealValue BitStringType
-% inlined BitStringValue
-IdentifierList
-% OctetStringType
-% inlined OctetStringValue
-% NullType NullValue
-SequenceType ComponentTypeList ComponentType
-% SequenceValue SequenceOfValue
-ComponentValueList SequenceOfType
-SAndSOfValue ValueList SetType
-% SetValue SetOfValue
-SetOfType
-ChoiceType
-% AlternativeTypeList made common with ComponentTypeList
-ChoiceValue
-AnyValue
-AnyDefBy
-SelectionType
-TaggedType Tag ClassNumber Class
-% redundant TaggedValue
-% EmbeddedPDVType EmbeddedPDVValue ExternalType ExternalValue ObjectIdentifierType
-ObjectIdentifierValue ObjIdComponentList ObjIdComponent
-% NameForm NumberForm NameAndNumberForm
-CharacterStringType
-RestrictedCharacterStringValue CharacterStringList
-% CharSyms CharsDefn
-Quadruple
-% Group Plane Row Cell
-Tuple
-% TableColumn TableRow
-% UnrestrictedCharacterString
-CharacterStringValue
-% UnrestrictedCharacterStringValue
-ConstrainedType Constraint ConstraintSpec TypeWithConstraint
-ElementSetSpecs ElementSetSpec
-%GeneralConstraint
-UserDefinedConstraint UserDefinedConstraintParameter
-UserDefinedConstraintParameters
-ExceptionSpec
-ExceptionIdentification
-Unions
-UnionMark
-UElems
-Intersections
-IntersectionElements
-IntersectionMark
-IElems
-Elements
-Elems
-SubTypeElements
-Exclusions
-LowerEndpoint
-UpperEndpoint
-LowerEndValue
-UpperEndValue
-TypeConstraints NamedConstraint PresenceConstraint
-
-ParameterizedTypeAssignment
-ParameterList
-Parameters
-Parameter
-ParameterizedType
-
-% X.681
-ObjectClassAssignment ObjectClass ObjectClassDefn
-FieldSpecs FieldSpec OptionalitySpec WithSyntaxSpec
-TokenOrGroupSpecs TokenOrGroupSpec
-SyntaxList OptionalGroup RequiredToken Word
-TypeOptionalitySpec
-ValueOrObjectOptSpec
-VSetOrOSetOptSpec
-ValueOptionalitySpec
-ObjectOptionalitySpec
-ValueSetOptionalitySpec
-ObjectSetOptionalitySpec
-% X.681 chapter 15
-InformationFromObjects
-ValueFromObject
-%ValueSetFromObjects
-TypeFromObject
-%ObjectFromObject
-%ObjectSetFromObjects
-ReferencedObjects
-FieldName
-PrimitiveFieldName
-
-ObjectAssignment
-ObjectSetAssignment
-ObjectSet
-ObjectSetElements
-Object
-ObjectDefn
-DefaultSyntax
-DefinedSyntax
-FieldSettings
-FieldSetting
-DefinedSyntaxTokens
-DefinedSyntaxToken
-Setting
-DefinedObject
-ObjectFromObject
-ObjectSetFromObjects
-ParameterizedObject
-ExternalObjectReference
-DefinedObjectSet
-DefinedObjectClass
-ExternalObjectClassReference
-
-% X.682
-TableConstraint
-ComponentRelationConstraint
-ComponentIdList
-
-% X.683
-ActualParameter
-.
-
-%UsefulType.
-
-Terminals
-'ABSENT' 'ABSTRACT-SYNTAX' 'ALL' 'ANY'
-'APPLICATION' 'AUTOMATIC' 'BEGIN' 'BIT'
-'BOOLEAN' 'BY' 'CHARACTER' 'CHOICE' 'CLASS' 'COMPONENT'
-'COMPONENTS' 'CONSTRAINED' 'DEFAULT' 'DEFINED' 'DEFINITIONS'
-'EMBEDDED' 'END' 'ENUMERATED' 'EXCEPT' 'EXPLICIT'
-'EXPORTS' 'EXTENSIBILITY' 'EXTERNAL' 'FALSE' 'FROM' 'GeneralizedTime'
-'TYPE-IDENTIFIER'
-'IDENTIFIER' 'IMPLICIT' 'IMPLIED' 'IMPORTS'
-'INCLUDES' 'INSTANCE' 'INTEGER' 'INTERSECTION'
-'MAX' 'MIN' 'MINUS-INFINITY' 'NULL'
-'OBJECT' 'ObjectDescriptor' 'OCTET' 'OF' 'OPTIONAL' 'PDV' 'PLUS-INFINITY'
-'PRESENT' 'PRIVATE' 'REAL' 'SEQUENCE' 'SET' 'SIZE'
-'STRING' 'SYNTAX' 'TAGS' 'TRUE' 'UNION'
-'UNIQUE' 'UNIVERSAL' 'UTCTime' 'WITH'
-'{' '}' '(' ')' '.' '::=' ';' ',' '@' '*' '-' '[' ']'
-'!' '..' '...' '|' '<' ':' '^'
-number identifier typereference restrictedcharacterstringtype
-bstring hstring cstring typefieldreference valuefieldreference
-objectclassreference word.
-
-Rootsymbol ModuleDefinition.
-Endsymbol '$end'.
-
-Left 300 'EXCEPT'.
-Left 200 '^'.
-Left 200 'INTERSECTION'.
-Left 100 '|'.
-Left 100 'UNION'.
-
-
-ModuleDefinition -> ModuleIdentifier
- 'DEFINITIONS'
- TagDefault
- ExtensionDefault
- '::='
- 'BEGIN'
- ModuleBody
- 'END' :
- {'ModuleBody',Ex,Im,Types} = '$7',
- {{typereference,Pos,Name},Defid} = '$1',
- #module{
- pos= Pos,
- name= Name,
- defid= Defid,
- tagdefault='$3',
- extensiondefault='$4',
- exports=Ex,
- imports=Im,
- typeorval=Types}.
-% {module, '$1','$3','$6'}.
-% Results always in a record of type module defined in asn_records.hlr
-
-ModuleIdentifier -> typereference DefinitiveIdentifier :
- put(asn1_module,'$1'#typereference.val),
- {'$1','$2'}.
-
-DefinitiveIdentifier -> '{' DefinitiveObjIdComponentList '}' : '$2' .
-DefinitiveIdentifier -> '$empty': [].
-
-DefinitiveObjIdComponentList -> DefinitiveObjIdComponent : ['$1'].
-DefinitiveObjIdComponentList -> DefinitiveObjIdComponent DefinitiveObjIdComponentList : ['$1'|'$2'].
-
-DefinitiveObjIdComponent -> identifier : '$1' . %expanded->
-% DefinitiveObjIdComponent -> NameForm : '$1' .
-DefinitiveObjIdComponent -> number : '$1' . %expanded->
-% DefinitiveObjIdComponent -> DefinitiveNumberForm : 'fix' .
-DefinitiveObjIdComponent -> identifier '(' number ')' : {'$1','$3'} . %expanded->
-% DefinitiveObjIdComponent -> DefinitiveNameAndNumberForm : {'$1','$3'} .
-
-% DefinitiveNumberForm -> number : 'fix' .
-
-% DefinitiveNameAndNumberForm -> identifier '(' DefinitiveNumberForm ')' : 'fix' .
-
-TagDefault -> 'EXPLICIT' 'TAGS' : put(tagdefault,'EXPLICIT'),'EXPLICIT' .
-TagDefault -> 'IMPLICIT' 'TAGS' : put(tagdefault,'IMPLICIT'),'IMPLICIT' .
-TagDefault -> 'AUTOMATIC' 'TAGS' : put(tagdefault,'AUTOMATIC'),'AUTOMATIC' .
-TagDefault -> '$empty': put(tagdefault,'EXPLICIT'),'EXPLICIT'. % because this is the default
-
-ExtensionDefault -> 'EXTENSIBILITY' 'IMPLIED' : 'IMPLIED'.
-ExtensionDefault -> '$empty' : 'false'. % because this is the default
-
-ModuleBody -> Exports Imports AssignmentList : {'ModuleBody','$1','$2','$3'}.
-ModuleBody -> '$empty' : {'ModuleBody',nil,nil,[]}.
-
-Exports -> 'EXPORTS' SymbolList ';' : {exports,'$2'}.
-Exports -> 'EXPORTS' ';' : {exports,[]}.
-Exports -> '$empty' : {exports,all} .
-
-% inlined above SymbolsExported -> SymbolList : '$1'.
-% inlined above SymbolsExported -> '$empty' : [].
-
-Imports -> 'IMPORTS' SymbolsFromModuleList ';' : {imports,'$2'}.
-Imports -> 'IMPORTS' ';' : {imports,[]}.
-Imports -> '$empty' : {imports,[]} .
-
-% inlined above SymbolsImported -> SymbolsFromModuleList : '$1'.
-% inlined above SymbolsImported -> '$empty' : [].
-
-SymbolsFromModuleList -> SymbolsFromModule :['$1'].
-% SymbolsFromModuleList -> SymbolsFromModuleList SymbolsFromModule :$1.%changed
-SymbolsFromModuleList -> SymbolsFromModule SymbolsFromModuleList :['$1'|'$2'].
-
-% expanded SymbolsFromModule -> SymbolList 'FROM' GlobalModuleReference : #'SymbolsFromModule'{symbols = '$1',module='$3'}.
-SymbolsFromModule -> SymbolList 'FROM' typereference : #'SymbolsFromModule'{symbols = '$1',module='$3'}.
-SymbolsFromModule -> SymbolList 'FROM' typereference '{' ValueList '}': #'SymbolsFromModule'{symbols = '$1',module='$3'}.
-%SymbolsFromModule -> SymbolList 'FROM' typereference identifier: #'SymbolsFromModule'{symbols = '$1',module='$3'}.
-%SymbolsFromModule -> SymbolList 'FROM' typereference Externalvaluereference: #'SymbolsFromModule'{symbols = '$1',module='$3'}.
-%SymbolsFromModule -> SymbolList 'FROM' typereference DefinedValue: #'SymbolsFromModule'{symbols = '$1',module='$3'}.
-
-% inlined GlobalModuleReference -> typereference AssignedIdentifier : {'$1','$2'} .
-
-% inlined above AssignedIdentifier -> '{' ValueList '}' : '$2'.
-% replaced AssignedIdentifier -> '{' DefinedValue ObjIdComponentList '}' :{'$2','$3'}.
-% not necessary , replaced by SAndSOfValue AssignedIdentifier -> ObjectIdentifierValue :'$1'.
-% AssignedIdentifier -> DefinedValue : '$1'.
-% inlined AssignedIdentifier -> '$empty' : undefined.
-
-SymbolList -> Symbol : ['$1'].
-SymbolList -> Symbol ',' SymbolList :['$1'|'$3'].
-
-Symbol -> Reference :'$1'.
-% later Symbol -> ParameterizedReference :'$1'.
-
-Reference -> typereference :'$1'.
-Reference -> identifier:'$1'.
-Reference -> typereference '{' '}':'$1'.
-Reference -> Externaltypereference '{' '}':'$1'.
-
-% later Reference -> objectclassreference :'$1'.
-% later Reference -> objectreference :'$1'.
-% later Reference -> objectsetreference :'$1'.
-
-AssignmentList -> Assignment : ['$1'].
-% modified AssignmentList -> AssignmentList Assignment : '$1'.
-AssignmentList -> Assignment AssignmentList : ['$1'|'$2'].
-
-Assignment -> TypeAssignment : '$1'.
-Assignment -> ValueAssignment : '$1'.
-% later Assignment -> ValueSetTypeAssignment : '$1'.
-Assignment -> ObjectClassAssignment : '$1'.
-% later Assignment -> ObjectAssignment : '$1'.
-% combined with ValueAssignment Assignment -> ObjectAssignment : '$1'.
-Assignment -> ObjectSetAssignment : '$1'.
-Assignment -> ParameterizedTypeAssignment : '$1'.
-%Assignment -> ParameterizedValueAssignment : '$1'.
-%Assignment -> ParameterizedValueSetTypeAssignment : '$1'.
-%Assignment -> ParameterizedObjectClassAssignment : '$1'.
-
-ObjectClassAssignment -> typereference '::=' 'CLASS' '{' FieldSpecs '}' :
-%ObjectClassAssignment -> objectclassreference '::=' 'CLASS' '{' FieldSpecs '}' :
- #typedef{pos=element(2,'$1'),name=element(3,'$1'),typespec={'CLASS','$5',[]}}.
-ObjectClassAssignment -> typereference '::=' 'CLASS' '{' FieldSpecs '}' WithSyntaxSpec :
-%ObjectClassAssignment -> objectclassreference '::=' 'CLASS' '{' FieldSpecs '}' WithSyntaxSpec :
- #typedef{pos=element(2,'$1'),name=element(3,'$1'),typespec={'CLASS','$5','$7'}}.
-
-FieldSpecs -> FieldSpec : ['$1'].
-FieldSpecs -> FieldSpec ',' FieldSpecs : ['$1'|'$3'].
-
-FieldSpec -> typefieldreference TypeOptionalitySpec : {typefield,'$1','$2'}.
-
-FieldSpec -> valuefieldreference Type 'UNIQUE' ValueOrObjectOptSpec :
- {fixedtypevaluefield,'$1','$2','UNIQUE','$4'}.
-FieldSpec -> valuefieldreference Type ValueOrObjectOptSpec :
- {fixedtypevaluefield,'$1','$2',undefined,'$3'}.
-
-FieldSpec -> valuefieldreference typefieldreference ValueOrObjectOptSpec :
- {variabletypevaluefield, '$1','$2','$3'}.
-
-FieldSpec -> typefieldreference typefieldreference VSetOrOSetOptSpec :
- {variabletypevaluesetfield, '$1','$2','$3'}.
-
-FieldSpec -> typefieldreference Type VSetOrOSetOptSpec :
- {fixedtypevaluesetfield, '$1','$2','$3'}.
-
-TypeOptionalitySpec -> 'DEFAULT' Type : {'DEFAULT','$2'}.
-TypeOptionalitySpec -> 'OPTIONAL' : 'OPTIONAL'.
-TypeOptionalitySpec -> '$empty' : 'MANDATORY'.
-
-ValueOrObjectOptSpec -> ValueOptionalitySpec : '$1'.
-ValueOrObjectOptSpec -> ObjectOptionalitySpec : '$1'.
-ValueOrObjectOptSpec -> 'OPTIONAL' : 'OPTIONAL'.
-ValueOrObjectOptSpec -> '$empty' : 'MANDATORY'.
-
-ValueOptionalitySpec -> 'DEFAULT' Value :
- case '$2' of
- {identifier,_,Id} -> {'DEFAULT',Id};
- _ -> {'DEFAULT','$2'}
- end.
-
-%ObjectOptionalitySpec -> 'DEFAULT' Object :{'DEFAULT','$1'}.
-ObjectOptionalitySpec -> 'DEFAULT' '{' FieldSetting ',' FieldSettings '}' :
- {'DEFAULT',{object,['$2'|'$4']}}.
-ObjectOptionalitySpec -> 'DEFAULT' '{' FieldSetting '}' :
- {'DEFAULT',{object, ['$2']}}.
-%ObjectOptionalitySpec -> 'DEFAULT' '{' DefinedSyntaxTokens '}' :
-% {'DEFAULT',{object, '$2'}}.
-ObjectOptionalitySpec -> 'DEFAULT' ObjectFromObject :
- {'DEFAULT',{object, '$2'}}.
-
-
-VSetOrOSetOptSpec -> ValueSetOptionalitySpec : '$1'.
-%VSetOrOSetOptSpec -> ObjectSetOptionalitySpec : '$1'.
-VSetOrOSetOptSpec -> 'OPTIONAL' : 'OPTIONAL'.
-VSetOrOSetOptSpec -> '$empty' : 'MANDATORY'.
-
-ValueSetOptionalitySpec -> 'DEFAULT' ValueSet : {'DEFAULT','$1'}.
-
-%ObjectSetOptionalitySpec -> 'DEFAULT' ObjectSet : {'DEFAULT','$1'}.
-
-OptionalitySpec -> 'DEFAULT' Type : {'DEFAULT','$2'}.
-OptionalitySpec -> 'DEFAULT' ValueNotNull :
- case '$2' of
- {identifier,_,Id} -> {'DEFAULT',Id};
- _ -> {'DEFAULT','$2'}
- end.
-OptionalitySpec -> 'OPTIONAL' : 'OPTIONAL'.
-OptionalitySpec -> '$empty' : 'MANDATORY'.
-
-WithSyntaxSpec -> 'WITH' 'SYNTAX' SyntaxList : {'WITH SYNTAX','$3'}.
-
-SyntaxList -> '{' TokenOrGroupSpecs '}' : '$2'.
-SyntaxList -> '{' '}' : [].
-
-TokenOrGroupSpecs -> TokenOrGroupSpec : ['$1'].
-TokenOrGroupSpecs -> TokenOrGroupSpec TokenOrGroupSpecs : ['$1'|'$2'].
-
-TokenOrGroupSpec -> RequiredToken : '$1'.
-TokenOrGroupSpec -> OptionalGroup : '$1'.
-
-OptionalGroup -> '[' TokenOrGroupSpecs ']' : '$2'.
-
-RequiredToken -> typereference : '$1'.
-RequiredToken -> Word : '$1'.
-RequiredToken -> ',' : '$1'.
-RequiredToken -> PrimitiveFieldName : '$1'.
-
-Word -> 'BY' : 'BY'.
-
-ParameterizedTypeAssignment -> typereference ParameterList '::=' Type :
- #ptypedef{pos=element(2,'$1'),name=element(3,'$1'),
- args='$2', typespec='$4'}.
-
-ParameterList -> '{' Parameters '}':'$2'.
-
-Parameters -> Parameter: ['$1'].
-Parameters -> Parameter ',' Parameters: ['$1'|'$3'].
-
-Parameter -> typereference: '$1'.
-Parameter -> Value: '$1'.
-Parameter -> Type ':' typereference: {'$1','$3'}.
-Parameter -> Type ':' Value: {'$1','$3'}.
-Parameter -> '{' typereference '}': {objectset,'$2'}.
-
-
-% Externaltypereference -> modulereference '.' typereference : {'$1','$3'} .
-Externaltypereference -> typereference '.' typereference : #'Externaltypereference'{pos=element(2,'$1'),module=element(3,'$1'),type=element(3,'$3')}.
-
-% Externalvaluereference -> modulereference '.' valuereference : {'$1','$3'} .
-% inlined Externalvaluereference -> typereference '.' identifier : #'Externalvaluereference'{pos=element(2,'$1'),module=element(3,'$1'),value=element(3,'$3')}.
-
-
-DefinedType -> Externaltypereference : '$1' .
-DefinedType -> typereference :
- #'Externaltypereference'{pos='$1'#typereference.pos,
- module= get(asn1_module),
- type= '$1'#typereference.val} .
-DefinedType -> typereference ParameterList : {pt,'$1','$2'}.
-DefinedType -> Externaltypereference ParameterList : {pt,'$1','$2'}.
-
-% ActualParameterList -> '{' ActualParameters '}' : '$1'.
-
-% ActualParameters -> ActualParameter : ['$1'].
-% ActualParameters -> ActualParameter ',' ActualParameters : ['$1'|'$3'].
-
-ActualParameter -> Type : '$1'.
-ActualParameter -> ValueNotNull : '$1'.
-ActualParameter -> ValueSet : '$1'.
-% later DefinedType -> ParameterizedType : '$1' .
-% later DefinedType -> ParameterizedValueSetType : '$1' .
-
-% inlined DefinedValue -> Externalvaluereference :'$1'.
-% inlined DefinedValue -> identifier :'$1'.
-% later DefinedValue -> ParameterizedValue :'$1'.
-
-% not referenced yet AbsoluteReference -> '@' GlobalModuleReference '.' ItemSpec :{'$2','$4'}.
-
-% not referenced yet ItemSpec -> typereference :'$1'.
-% not referenced yet ItemSpec -> ItemId '.' ComponentId : {'$1','$3'}.
-
-% not referenced yet ItemId -> ItemSpec : '$1'.
-
-% not referenced yet ComponentId -> identifier :'$1'.
-% not referenced yet ComponentId -> number :'$1'.
-% not referenced yet ComponentId -> '*' :'$1'.
-
-TypeAssignment -> typereference '::=' Type :
- #typedef{pos=element(2,'$1'),name=element(3,'$1'),typespec='$3'}.
-
-ValueAssignment -> identifier Type '::=' Value :
- #valuedef{pos=element(2,'$1'),name=element(3,'$1'),type='$2',value='$4'}.
-
-% later ValueSetTypeAssignment -> typereference Type '::=' ValueSet :{'ValueSetTypeAssignment','$1','$2','$4'}.
-
-
-ValueSet -> '{' ElementSetSpec '}' : {valueset,'$2'}.
-
-% record(type,{tag,def,constraint}).
-Type -> BuiltinType :#type{def='$1'}.
-Type -> 'NULL' :#type{def='NULL'}.
-Type -> TaggedType:'$1'.
-Type -> ReferencedType:#type{def='$1'}. % change notag later
-Type -> ConstrainedType:'$1'.
-
-%ANY is here for compatibility with the old ASN.1 standard from 1988
-BuiltinType -> 'ANY' AnyDefBy:
- case '$2' of
- [] -> 'ANY';
- _ -> {'ANY DEFINED BY','$2'}
- end.
-BuiltinType -> BitStringType :'$1'.
-BuiltinType -> 'BOOLEAN' :element(1,'$1').
-BuiltinType -> CharacterStringType :'$1'.
-BuiltinType -> ChoiceType :'$1'.
-BuiltinType -> 'EMBEDDED' 'PDV' :'EMBEDDED PDV'.
-BuiltinType -> EnumeratedType :'$1'.
-BuiltinType -> 'EXTERNAL' :element(1,'$1').
-% later BuiltinType -> InstanceOfType :'$1'.
-BuiltinType -> IntegerType :'$1'.
-% BuiltinType -> 'NULL' :element(1,'$1').
-% later BuiltinType -> ObjectClassFieldType :'$1'.
-BuiltinType -> 'OBJECT' 'IDENTIFIER' :'OBJECT IDENTIFIER'.
-BuiltinType -> 'OCTET' 'STRING' :'OCTET STRING'.
-BuiltinType -> 'REAL' :element(1,'$1').
-BuiltinType -> SequenceType :'$1'.
-BuiltinType -> SequenceOfType :'$1'.
-BuiltinType -> SetType :'$1'.
-BuiltinType -> SetOfType :'$1'.
-% The so called Useful types
-BuiltinType -> 'GeneralizedTime': 'GeneralizedTime'.
-BuiltinType -> 'UTCTime' :'UTCTime'.
-BuiltinType -> 'ObjectDescriptor' : 'ObjectDescriptor'.
-
-% moved BuiltinType -> TaggedType :'$1'.
-
-
-AnyDefBy -> 'DEFINED' 'BY' identifier: '$3'.
-AnyDefBy -> '$empty': [].
-
-NamedType -> identifier Type :
-%{_,Pos,Val} = '$1',
-%{'NamedType',Pos,{Val,'$2'}}.
-V1 = '$1',
-{'NamedType',V1#identifier.pos,{V1#identifier.val,'$2'}}.
-NamedType -> SelectionType :'$1'.
-
-ReferencedType -> DefinedType : '$1'.
-% redundant ReferencedType -> UsefulType : 'fix'.
-ReferencedType -> SelectionType : '$1'.
-ReferencedType -> TypeFromObject : '$1'.
-% later ReferencedType -> ValueSetFromObjects : 'fix'.
-
-% to much conflicts Value -> AnyValue :'$1'.
-Value -> ValueNotNull : '$1'.
-Value -> 'NULL' :element(1,'$1').
-
-ValueNotNull -> BuiltinValue :'$1'.
-% inlined Value -> DefinedValue :'$1'. % DefinedValue , identifier
-% inlined Externalvaluereference -> Externalvaluereference :'$1'.
-ValueNotNull -> typereference '.' identifier :
- #'Externalvaluereference'{pos=element(2,'$1'),module=element(3,'$1'),
- value=element(3,'$3')}.
-ValueNotNull -> identifier :'$1'.
-
-
-%tmp Value -> NamedNumber: '$1'. % not a value but part of ObjIdC
-% redundant BuiltinValue -> BitStringValue :'$1'.
-BuiltinValue -> BooleanValue :'$1'.
-BuiltinValue -> CharacterStringValue :'$1'.
-BuiltinValue -> ChoiceValue :'$1'.
-% BuiltinValue -> EmbeddedPDVValue :'$1'. ==SequenceValue
-% BuiltinValue -> EnumeratedValue :'$1'. identifier
-% BuiltinValue -> ExternalValue :'$1'. ==SequenceValue
-% later BuiltinValue -> InstanceOfValue :'$1'.
-BuiltinValue -> SignedNumber :'$1'.
-% BuiltinValue -> 'NULL' :'$1'.
-% later BuiltinValue -> ObjectClassFieldValue :'$1'.
-% replaced by SAndSOfValue BuiltinValue -> ObjectIdentifierValue :'$1'.
-BuiltinValue -> bstring :element(3,'$1').
-BuiltinValue -> hstring :element(3,'$1').
-% conflict BuiltinValue -> RealValue :'$1'.
-BuiltinValue -> SAndSOfValue :'$1'.
-% replaced BuiltinValue -> SequenceOfValue :'$1'.
-% replaced BuiltinValue -> SequenceValue :'$1'.
-% replaced BuiltinValue -> SetValue :'$1'.
-% replaced BuiltinValue -> SetOfValue :'$1'.
-% conflict redundant BuiltinValue -> TaggedValue :'$1'.
-
-% inlined ReferencedValue -> DefinedValue:'$1'.
-% ReferencedValue -> Externalvaluereference:'$1'.
-% ReferencedValue -> identifier :'$1'.
-% later ReferencedValue -> ValueFromObject:'$1'.
-
-% inlined BooleanType -> BOOLEAN :'BOOLEAN'.
-
-% to much conflicts AnyValue -> Type ':' Value : {'ANYVALUE',{'$1','$3'}}.
-
-BooleanValue -> TRUE :true.
-BooleanValue -> FALSE :false.
-
-IntegerType -> 'INTEGER' : 'INTEGER'.
-IntegerType -> 'INTEGER' '{' NamedNumberList '}' : {'INTEGER','$3'}.
-
-NamedNumberList -> NamedNumber :['$1'].
-% modified NamedNumberList -> NamedNumberList ',' NamedNumber :'fix'.
-NamedNumberList -> NamedNumber ',' NamedNumberList :['$1'|'$3'].
-
-NamedNumber -> identifier '(' SignedNumber ')' : {'NamedNumber',element(3,'$1'),'$3'}.
-NamedNumber -> identifier '(' typereference '.' identifier ')' : {'NamedNumber',element(3,'$1'),{'ExternalValue',element(3,'$3'),element(3,'$5')}}.
-NamedNumber -> identifier '(' identifier ')' : {'NamedNumber',element(3,'$1'),element(3,'$3')}.
-
-%NamedValue -> identifier Value :
-% {'NamedValue',element(2,'$1'),element(3,'$1'),'$2'}.
-
-
-SignedNumber -> number : element(3,'$1').
-SignedNumber -> '-' number : - element(3,'$1').
-
-% inlined IntegerValue -> SignedNumber :'$1'.
-% conflict moved to Value IntegerValue -> identifier:'$1'.
-
-EnumeratedType -> ENUMERATED '{' Enumeration '}' :{'ENUMERATED','$3'}.
-
-% inlined Enumerations -> Enumeration :{'$1','false',[]}.
-% inlined Enumerations -> Enumeration ',' '...' : {'$1','true',[]}.
-% inlined Enumerations -> Enumeration ',' '...' ',' Enumeration : {'$1','true','$5'}.
-
-Enumeration -> EnumerationItem :['$1'].
-% modified Enumeration -> EnumerationItem ',' Enumeration :'fix'.
-Enumeration -> EnumerationItem ',' Enumeration :['$1'|'$3'].
-
-EnumerationItem -> identifier:element(3,'$1').
-EnumerationItem -> NamedNumber :'$1'.
-EnumerationItem -> '...' :'EXTENSIONMARK'.
-
-% conflict moved to Value EnumeratedValue -> identifier:'$1'.
-
-% inlined RealType -> REAL:'REAL'.
-
-RealValue -> NumericRealValue :'$1'.
-RealValue -> SpecialRealValue:'$1'.
-
-% ?? NumericRealValue -> number:'$1'. % number MUST BE '0'
-NumericRealValue -> SAndSOfValue : '$1'. % Value of the associated sequence type
-
-SpecialRealValue -> 'PLUS-INFINITY' :'$1'.
-SpecialRealValue -> 'MINUS-INFINITY' :'$1'.
-
-BitStringType -> 'BIT' 'STRING' :{'BIT STRING',[]}.
-BitStringType -> 'BIT' 'STRING' '{' NamedNumberList '}' :{'BIT STRING','$4'}.
-% NamedBitList replaced by NamedNumberList to reduce the grammar
-% Must check later that all "numbers" are positive
-
-% inlined BitStringValue -> bstring:'$1'.
-% inlined BitStringValue -> hstring:'$1'.
-% redundant use SequenceValue BitStringValue -> '{' IdentifierList '}' :$2.
-% redundant use SequenceValue BitStringValue -> '{' '}' :'fix'.
-
-IdentifierList -> identifier :[element(3,'$1')].
-% modified IdentifierList -> IdentifierList ',' identifier :'$1'.
-IdentifierList -> identifier ',' IdentifierList :[element(3,'$1')|'$3'].
-
-% inlined OctetStringType -> 'OCTET' 'STRING' :'OCTET STRING'.
-
-% inlined OctetStringValue -> bstring:'$1'.
-% inlined OctetStringValue -> hstring:'$1'.
-
-% inlined NullType -> 'NULL':'NULL'.
-
-% inlined NullValue -> NULL:'NULL'.
-
-% result is {'SEQUENCE',Optionals,Extensionmark,Componenttypelist}.
-SequenceType -> SEQUENCE '{' ComponentTypeList '}' :{'SEQUENCE','$3'}.
-% SequenceType -> SEQUENCE '{' ComponentTypeLists '}' :{'SEQUENCE','$3'}.
-% SequenceType -> SEQUENCE '{' ExtensionAndException '}' :{'SEQUENCE','$3'}.
-SequenceType -> SEQUENCE '{' '}' :{'SEQUENCE',[]}.
-
-% result is {RootComponentList,ExtensionAndException,AdditionalComponentTypeList}.
-%ComponentTypeLists -> ComponentTypeList ',' ExtensionAndException :{'$1','$3',[]}.
-%ComponentTypeLists -> ComponentTypeList :{'$1','false',[]}.
-%ComponentTypeLists -> ComponentTypeList ',' ExtensionAndException
-% ',' ComponentTypeList :{'$1','$3', '$5'}.
-%ComponentTypeLists -> ExtensionAndException ',' ComponentTypeList :{[],'$1','$3'}.
-
-ComponentTypeList -> ComponentType :['$1'].
-% modified below ComponentTypeList -> ComponentTypeList ',' ComponentType :'$1'.
-ComponentTypeList -> ComponentType ',' ComponentTypeList :['$1'|'$3'].
-
-% -record('ComponentType',{pos,name,type,attrib}).
-ComponentType -> '...' ExceptionSpec :{'EXTENSIONMARK',element(2,'$1'),'$2'}.
-ComponentType -> NamedType :
- {'NamedType',Pos,{Name,Type}} = '$1',
- #'ComponentType'{pos=Pos,name=Name,typespec=Type,prop=mandatory}.
-ComponentType -> NamedType 'OPTIONAL' :
- {'NamedType',Pos,{Name,Type}} = '$1',
- #'ComponentType'{pos=Pos,name=Name,typespec=Type,prop='OPTIONAL'}.
-ComponentType -> NamedType 'DEFAULT' Value:
- {'NamedType',Pos,{Name,Type}} = '$1',
- #'ComponentType'{pos=Pos,name=Name,typespec=Type,prop={'DEFAULT','$3'}}.
-ComponentType -> 'COMPONENTS' 'OF' Type :{'COMPONENTS OF','$3'}.
-
-% redundant ExtensionAndException -> '...' : extensionmark.
-% ExtensionAndException -> '...' ExceptionSpec : {extensionmark,'$2'}.
-
-% replaced SequenceValue -> '{' ComponentValueList '}':'$2'.
-% replaced SequenceValue -> '{' '}':[].
-
-ValueList -> Value :['$1'].
-ValueList -> NamedNumber :['$1'].
-% modified ValueList -> ValueList ',' Value :'$1'.
-ValueList -> Value ',' ValueList :['$1'|'$3'].
-ValueList -> Value ',' '...' :['$1' |[]].
-ValueList -> Value ValueList : ['$1',space|'$2'].
-ValueList -> NamedNumber ValueList: ['$1',space|'$2'].
-
-%ComponentValueList -> identifier ObjIdComponent:[{'NamedValue','$1','$2'}].
-%ComponentValueList -> NamedValue :['$1'].
-%ComponentValueList -> NamedValue ',' ComponentValueList:['$1'|'$3'].
-%ComponentValueList -> identifier ObjIdComponent ',' ComponentValueList :[{'NamedValue', '$1','$2'}|'$4'].
-
-SequenceOfType -> SEQUENCE OF Type : {'SEQUENCE OF','$3'}.
-
-% replaced SequenceOfValue with SAndSOfValue
-
-SAndSOfValue -> '{' ValueList '}' :'$2'.
-%SAndSOfValue -> '{' ComponentValueList '}' :'$2'.
-SAndSOfValue -> '{' '}' :[].
-
-% save for later SetType ->
-% result is {'SET',Optionals,Extensionmark,Componenttypelist}.
-SetType -> SET '{' ComponentTypeList '}' :{'SET','$3'}.
-% SetType -> SET '{' ExtensionAndException '}' :{'SET','$3'}.
-SetType -> SET '{' '}' :{'SET',[]}.
-
-% replaced SetValue with SAndSOfValue
-
-SetOfType -> SET OF Type : {'SET OF','$3'}.
-
-% replaced SetOfValue with SAndSOfValue
-
-ChoiceType -> 'CHOICE' '{' ComponentTypeList '}' :{'CHOICE','$3'}.
-% AlternativeTypeList is replaced by ComponentTypeList
-ChoiceValue -> identifier ':' Value : {'ChoiceValue',element(3,'$1'),'$3'}.
-% save for later SelectionType ->
-
-TaggedType -> Tag Type : '$2'#type{tag=['$1'#tag{type={default,get(tagdefault)}}]}.
-TaggedType -> Tag IMPLICIT Type :'$3'#type{tag=['$1'#tag{type='IMPLICIT'}]}.
-TaggedType -> Tag EXPLICIT Type :'$3'#type{tag=['$1'#tag{type='EXPLICIT'}]}.
-
-Tag -> '[' Class ClassNumber ']': #tag{class='$2',number='$3'}.
-Tag -> '[' Class typereference '.' identifier ']':
- #tag{class='$2',number=#'Externalvaluereference'{pos=element(2,'$3'),module=element(3,'$3'),
- value=element(3,'$5')}}.
-Tag -> '[' Class number ']': #tag{class='$2',number=element(3,'$3')}.
-Tag -> '[' Class identifier ']': #tag{class='$2',number=element(3,'$3')}.
-
-ClassNumber -> number :element(3,'$1').
-% inlined above ClassNumber -> typereference '.' identifier :{'Externalvaluereference',element(3,'$1'),element(3,'$3')}.
-ClassNumber -> identifier :element(3,'$1').
-
-Class -> 'UNIVERSAL' :element(1,'$1').
-Class -> 'APPLICATION' :element(1,'$1').
-Class -> 'PRIVATE' :element(1,'$1').
-Class -> '$empty' :'CONTEXT'.
-
-% conflict redundant TaggedValue -> Value:'$1'.
-
-% inlined EmbeddedPDVType -> 'EMBEDDED' 'PDV' :'EMBEDDED PDV'.
-
-% inlined EmbeddedPDVValue -> SequenceValue:'$1'.
-
-% inlined ExternalType -> 'EXTERNAL' :'EXTERNAL'.
-
-% inlined ExternalValue -> SequenceValue :'$1'.
-
-% inlined ObjectIdentifierType -> 'OBJECT' 'IDENTIFIER' :'OBJECT IDENTIFIER'.
-
-ObjectIdentifierValue -> '{' ObjIdComponentList '}' :'$2'.
-% inlined ObjectIdentifierValue -> SequenceAndSequenceOfValue :'$1'.
-% ObjectIdentifierValue -> '{' identifier ObjIdComponentList '}' :{'ObjectIdentifierValue','$2','$3'}.
-% ObjectIdentifierValue -> '{' typereference '.' identifier ObjIdComponentList '}' :{'ObjectIdentifierValue',{'$2','$4'},'$5'}.
-
-ObjIdComponentList -> Value:'$1'.
-ObjIdComponentList -> Value ObjIdComponentList :['$1'|'$2'].
-%ObjIdComponentList -> DefinedValue:'$1'.
-%ObjIdComponentList -> number:'$1'.
-%ObjIdComponentList -> DefinedValue ObjIdComponentList :['$1'|'$2'].
-%ObjIdComponentList -> number ObjIdComponentList :['$1'|'$2'].
-%ObjIdComponentList -> ObjIdComponent ObjIdComponentList :['$1'|'$2'].
-%ObjIdComponentList -> ObjIdComponent ObjIdComponentList :['$1'|'$2'].
-
-% redundant ObjIdComponent -> NameForm :'$1'. % expanded
-% replaced by 2 ObjIdComponent -> NumberForm :'$1'.
-% ObjIdComponent -> number :'$1'.
-% ObjIdComponent -> DefinedValue :'$1'. % means DefinedValue
-% ObjIdComponent -> NameAndNumberForm :'$1'.
-% ObjIdComponent -> NamedNumber :'$1'.
-% NamedBit replaced by NamedNumber to reduce grammar
-% must check later that "number" is positive
-
-% NameForm -> identifier:'$1'.
-
-% inlined NumberForm -> number :'$1'.
-% inlined NumberForm -> DefinedValue :'$1'.
-
-% replaced by NamedBit NameAndNumberForm -> identifier '(' NumberForm ')'.
-% NameAndNumberForm -> NamedBit:'$1'.
-
-
-CharacterStringType -> restrictedcharacterstringtype :element(3,'$1').
-CharacterStringType -> 'CHARACTER' 'STRING' :'CHARACTER STRING'.
-
-RestrictedCharacterStringValue -> cstring :element(3, '$1').
-% modified below RestrictedCharacterStringValue -> CharacterStringList :'$1'.
-% conflict vs BuiltinValue RestrictedCharacterStringValue -> SequenceAndSequenceOfValue :'$1'.
-RestrictedCharacterStringValue -> Quadruple :'$1'.
-RestrictedCharacterStringValue -> Tuple :'$1'.
-
-% redundant CharacterStringList -> '{' ValueList '}' :'$2'. % modified
-
-% redundant CharSyms -> CharsDefn :'$1'.
-% redundant CharSyms -> CharSyms ',' CharsDefn :['$1'|'$3'].
-
-% redundant CharsDefn -> cstring :'$1'.
-% temporary replaced see below CharsDefn -> DefinedValue :'$1'.
-% redundant CharsDefn -> Value :'$1'.
-
-Quadruple -> '{' number ',' number ',' number ',' number '}' :{'Quadruple','$2','$4','$6','$8'}.
-% {Group,Plane,Row,Cell}
-
-Tuple -> '{' number ',' number '}' :{'Tuple', '$2','$4'}.
-% {TableColumn,TableRow}
-
-% inlined UnrestrictedCharacterString -> 'CHARACTER' 'STRING' :'CHARACTER STRING'.
-
-CharacterStringValue -> RestrictedCharacterStringValue :'$1'.
-% conflict vs BuiltinValue CharacterStringValue -> SequenceValue :'$1'. % UnrestrictedCharacterStringValue
-
-% inlined UsefulType -> typereference :'$1'.
-
-SelectionType -> identifier '<' Type : {'SelectionType',element(3,'$1'),'$3'}.
-
-ConstrainedType -> Type Constraint :
- '$1'#type{constraint=merge_constraints(['$2'])}.
-ConstrainedType -> Type Constraint Constraint :
- '$1'#type{constraint=merge_constraints(['$2','$3'])}.
-ConstrainedType -> Type Constraint Constraint Constraint:
- '$1'#type{constraint=merge_constraints(['$2','$3','$4'])}.
-ConstrainedType -> Type Constraint Constraint Constraint Constraint:
- '$1'#type{constraint=merge_constraints(['$2','$3','$4','$5'])}.
-%ConstrainedType -> Type Constraint :'$1'#type{constraint='$2'}.
-%ConstrainedType -> Type Constraint :'$1'#type{constraint='$2'}.
-ConstrainedType -> TypeWithConstraint :'$1'.
-
-TypeWithConstraint -> 'SET' Constraint 'OF' Type :
- #type{def = {'SET OF','$4'},constraint=merge_constraints(['$2'])}.
-TypeWithConstraint -> 'SET' 'SIZE' Constraint 'OF' Type :
- #type{def = {'SET OF','$5'},constraint = merge_constraints([#constraint{c={'SizeConstraint','$3'#constraint.c}}])}.
-TypeWithConstraint -> 'SEQUENCE' Constraint 'OF' Type :
- #type{def = {'SEQUENCE OF','$4'},constraint =
- merge_constraints(['$2'])}.
-TypeWithConstraint -> 'SEQUENCE' 'SIZE' Constraint 'OF' Type :
- #type{def = {'SEQUENCE OF','$5'},constraint = merge_constraints([#constraint{c={'SizeConstraint','$3'#constraint.c}}])}.
-
-
-Constraint -> '(' ConstraintSpec ExceptionSpec ')' :
- #constraint{c='$2',e='$3'}.
-
-% inlined Constraint -> SubTypeConstraint :'$1'.
-ConstraintSpec -> ElementSetSpecs :'$1'.
-ConstraintSpec -> UserDefinedConstraint :'$1'.
-ConstraintSpec -> TableConstraint :'$1'.
-
-TableConstraint -> ComponentRelationConstraint : '$1'.
-TableConstraint -> ObjectSet : '$1'.
-%TableConstraint -> '{' typereference '}' :tableconstraint.
-
-ComponentRelationConstraint -> '{' typereference '}' '{' '@' ComponentIdList '}' : componentrelation.
-ComponentRelationConstraint -> '{' typereference '}' '{' '@' '.' ComponentIdList '}' : componentrelation.
-
-ComponentIdList -> identifier: ['$1'].
-ComponentIdList -> identifier '.' ComponentIdList: ['$1'| '$3'].
-
-
-% later ConstraintSpec -> GeneralConstraint :'$1'.
-
-% from X.682
-UserDefinedConstraint -> 'CONSTRAINED' 'BY' '{' '}' : {constrained_by,[]}.
-UserDefinedConstraint -> 'CONSTRAINED' 'BY'
- '{' UserDefinedConstraintParameters '}' : {constrained_by,'$4'}.
-
-UserDefinedConstraintParameters -> UserDefinedConstraintParameter : ['$1'].
-UserDefinedConstraintParameters ->
- UserDefinedConstraintParameter ','
- UserDefinedConstraintParameters: ['$1'|'$3'].
-
-UserDefinedConstraintParameter -> Type '.' ActualParameter : {'$1','$3'}.
-UserDefinedConstraintParameter -> ActualParameter : '$1'.
-
-
-
-ExceptionSpec -> '!' ExceptionIdentification : '$1'.
-ExceptionSpec -> '$empty' : undefined.
-
-ExceptionIdentification -> SignedNumber : '$1'.
-% inlined ExceptionIdentification -> DefinedValue : '$1'.
-ExceptionIdentification -> typereference '.' identifier :
- #'Externalvaluereference'{pos=element(2,'$1'),module=element(3,'$1'),
- value=element(3,'$1')}.
-ExceptionIdentification -> identifier :'$1'.
-ExceptionIdentification -> Type ':' Value : {'$1','$3'}.
-
-% inlined SubTypeConstraint -> ElementSetSpec
-
-ElementSetSpecs -> ElementSetSpec : '$1'.
-ElementSetSpecs -> ElementSetSpec ',' '...': {'$1',[]}.
-ElementSetSpecs -> '...' ',' ElementSetSpec : {[],'$3'}.
-ElementSetSpecs -> ElementSetSpec ',' '...' ',' ElementSetSpec : {'$1','$5'}.
-
-ElementSetSpec -> Unions : '$1'.
-ElementSetSpec -> 'ALL' Exclusions : {'ALL','$2'}.
-
-Unions -> Intersections : '$1'.
-Unions -> UElems UnionMark IntersectionElements :
- case {'$1','$3'} of
- {{'SingleValue',V1},{'SingleValue',V2}} ->
- {'SingleValue',ordsets:union(to_set(V1),to_set(V2))}
- end.
-
-UElems -> Unions :'$1'.
-
-Intersections -> IntersectionElements :'$1'.
-Intersections -> IElems IntersectionMark IntersectionElements :
- case {'$1','$3'} of
- {{'SingleValue',V1},{'SingleValue',V2}} ->
- {'SingleValue',ordsets:intersection(to_set(V1),to_set(V2))};
- {V1,V2} when list(V1) ->
- V1 ++ [V2];
- {V1,V2} ->
- [V1,V2]
- end.
-%Intersections -> IElems '^' IntersectionElements :{'INTERSECTION','$1','$3'}.
-%Intersections -> IElems 'INTERSECTION' IntersectionElements :{'INTERSECTION','$1','$3'}.
-
-IElems -> Intersections :'$1'.
-
-IntersectionElements -> Elements :'$1'.
-IntersectionElements -> Elems Exclusions :{'$1','$2'}.
-
-Elems -> Elements :'$1'.
-
-Exclusions -> 'EXCEPT' Elements :{'EXCEPT','$2'}.
-
-IntersectionMark -> 'INTERSECTION':'$1'.
-IntersectionMark -> '^':'$1'.
-UnionMark -> 'UNION':'$1'.
-UnionMark -> '|':'$1'.
-
-
-Elements -> SubTypeElements : '$1'.
-%Elements -> ObjectSetElements : '$1'.
-Elements -> '(' ElementSetSpec ')' : '$2'.
-Elements -> ReferencedType : '$1'.
-
-SubTypeElements -> ValueList : {'SingleValue','$1'}. % NOTE it must be a Value
-% The rule above modifyed only because of conflicts
-SubTypeElements -> 'INCLUDES' Type : {'ContainedSubType','$2'}.
-%not lalr1 if this is activated SubTypeElements -> Type : {'TypeConstraint','$1'}.
-SubTypeElements -> LowerEndpoint '..' UpperEndpoint : {'ValueRange',{'$1','$3'}}.
-SubTypeElements -> 'FROM' Constraint : {'PermittedAlphabet','$2'#constraint.c}.
-SubTypeElements -> 'SIZE' Constraint: {'SizeConstraint','$2'#constraint.c}.
-% later will introduce conflicts related to NULL SubTypeElements -> Type : {'TypeConstraint','$1'}.
-SubTypeElements -> 'WITH' 'COMPONENT' Constraint:{'WITH COMPONENT','$3'}.
-SubTypeElements -> 'WITH' 'COMPONENTS' '{' TypeConstraints '}':{'WITH COMPONENTS',{'FullSpecification','$4'}}.
-SubTypeElements -> 'WITH' 'COMPONENTS' '{' '...' ',' TypeConstraints '}' :{'WITH COMPONENTS',{'PartialSpecification','$3'}}.
-
-% inlined above InnerTypeConstraints ::=
-% inlined above SingleTypeConstraint::= Constraint
-% inlined above MultipleTypeConstraints ::= FullSpecification | PartialSpecification
-% inlined above FullSpecification ::= "{" TypeConstraints "}"
-% inlined above PartialSpecification ::= "{" "..." "," TypeConstraints "}"
-% TypeConstraints -> identifier : [{'NamedConstraint',element(3,'$1'),undefined,undefined}]. % is this really meaningful or allowed
-TypeConstraints -> NamedConstraint : ['$1'].
-TypeConstraints -> NamedConstraint ',' TypeConstraints : ['$1'|'$3'].
-TypeConstraints -> identifier : ['$1'].
-TypeConstraints -> identifier ',' TypeConstraints : ['$1'|'$3'].
-
-NamedConstraint -> identifier Constraint PresenceConstraint :{'NamedConstraint',element(3,'$1'),'$2','$3'}.
-NamedConstraint -> identifier Constraint :{'NamedConstraint',element(3,'$1'),'$2',undefined}.
-NamedConstraint -> identifier PresenceConstraint :{'NamedConstraint',element(3,'$1'),undefined,'$2'}.
-
-PresenceConstraint -> 'PRESENT' : 'PRESENT'.
-PresenceConstraint -> 'ABSENT' : 'ABSENT'.
-PresenceConstraint -> 'OPTIONAL' : 'OPTIONAL'.
-
-
-
-LowerEndpoint -> LowerEndValue :'$1'.
-%LowerEndpoint -> LowerEndValue '<':{gt,'$1'}.
-LowerEndpoint -> LowerEndValue '<':('$1'+1).
-
-UpperEndpoint -> UpperEndValue :'$1'.
-%UpperEndpoint -> '<' UpperEndValue :{lt,'$2'}.
-UpperEndpoint -> '<' UpperEndValue :('$2'-1).
-
-LowerEndValue -> Value :'$1'.
-LowerEndValue -> 'MIN' :'MIN'.
-
-UpperEndValue -> Value :'$1'.
-UpperEndValue -> 'MAX' :'MAX'.
-
-
-% X.681
-
-
-% X.681 chap 15
-
-%TypeFromObject -> ReferencedObjects '.' FieldName : {'$1','$3'}.
-TypeFromObject -> typereference '.' FieldName : {'$1','$3'}.
-
-ReferencedObjects -> typereference : '$1'.
-%ReferencedObjects -> ParameterizedObject
-%ReferencedObjects -> DefinedObjectSet
-%ReferencedObjects -> ParameterizedObjectSet
-
-FieldName -> typefieldreference : ['$1'].
-FieldName -> valuefieldreference : ['$1'].
-FieldName -> FieldName '.' FieldName : ['$1' | '$3'].
-
-PrimitiveFieldName -> typefieldreference : '$1'.
-PrimitiveFieldName -> valuefieldreference : '$1'.
-
-%ObjectSetAssignment -> typereference DefinedObjectClass '::=' ObjectSet: null.
-ObjectSetAssignment -> typereference typereference '::=' ObjectSet :
- #typedef{pos=element(2,'$1'),name=element(3,'$1'),typespec={'ObjectSet',element(3,'$2'), '$4'}}.
-ObjectSetAssignment -> typereference typereference '.' typereference '::=' ObjectSet.
-
-ObjectSet -> '{' ElementSetSpecs '}' : '$2'.
-ObjectSet -> '{' '...' '}' : ['EXTENSIONMARK'].
-
-%ObjectSetElements -> Object.
-% ObjectSetElements -> identifier : '$1'.
-%ObjectSetElements -> DefinedObjectSet.
-%ObjectSetElements -> ObjectSetFromObjects.
-%ObjectSetElements -> ParameterizedObjectSet.
-
-%ObjectAssignment -> identifier DefinedObjectClass '::=' Object.
-ObjectAssignment -> ValueAssignment.
-%ObjectAssignment -> identifier typereference '::=' Object.
-%ObjectAssignment -> identifier typereference '.' typereference '::=' Object.
-
-%Object -> DefinedObject: '$1'.
-%Object -> ExternalObjectReference: '$1'.%Object -> DefinedObject: '$1'.
-Object -> typereference '.' identifier: '$1'.%Object -> DefinedObject: '$1'.
-Object -> identifier: '$1'.%Object -> DefinedObject: '$1'.
-
-%Object -> ObjectDefn -> DefaultSyntax: '$1'.
-Object -> '{' FieldSetting ',' FieldSettings '}' : ['$2'|'$4'].
-Object -> '{' FieldSetting '}' :['$2'].
-
-%% For User-friendly notation
-%% Object -> ObjectDefn -> DefinedSyntax
-Object -> '{' '}'.
-Object -> '{' DefinedSyntaxTokens '}'.
-
-% later Object -> ParameterizedObject: '$1'. look in x.683
-
-%DefinedObject -> ExternalObjectReference: '$1'.
-%DefinedObject -> identifier: '$1'.
-
-DefinedObjectClass -> typereference.
-%DefinedObjectClass -> objectclassreference.
-DefinedObjectClass -> ExternalObjectClassReference.
-%DefinedObjectClass -> typereference '.' objectclassreference.
-%%DefinedObjectClass -> UsefulObjectClassReference.
-
-ExternalObjectReference -> typereference '.' identifier.
-ExternalObjectClassReference -> typereference '.' typereference.
-%%ExternalObjectClassReference -> typereference '.' objectclassreference.
-
-ObjectDefn -> DefaultSyntax: '$1'.
-%ObjectDefn -> DefinedSyntax: '$1'.
-
-ObjectFromObject -> ReferencedObjects '.' FieldName : {'ObjectFromObject','$1','$3'}.
-
-% later look in x.683 ParameterizedObject ->
-
-%DefaultSyntax -> '{' '}'.
-%DefaultSyntax -> '{' FieldSettings '}': '$2'.
-DefaultSyntax -> '{' FieldSetting ',' FieldSettings '}': '$2'.
-DefaultSyntax -> '{' FieldSetting '}': '$2'.
-
-FieldSetting -> PrimitiveFieldName Setting: {'$1','$2'}.
-
-FieldSettings -> FieldSetting ',' FieldSettings: ['$1'|'$3'].
-FieldSettings -> FieldSetting ',' FieldSettings: ['$1'|'$3'].
-FieldSettings -> FieldSetting: '$1'.
-
-%DefinedSyntax -> '{' '}'.
-DefinedSyntax -> '{' DefinedSyntaxTokens '}': '$2'.
-
-DefinedSyntaxTokens -> DefinedSyntaxToken: '$1'.
-DefinedSyntaxTokens -> DefinedSyntaxToken DefinedSyntaxTokens: ['$1'|'$2'].
-
-% expanded DefinedSyntaxToken -> Literal: '$1'.
-%DefinedSyntaxToken -> typereference: '$1'.
-DefinedSyntaxToken -> word: '$1'.
-DefinedSyntaxToken -> ',': '$1'.
-DefinedSyntaxToken -> Setting: '$1'.
-%DefinedSyntaxToken -> '$empty': nil .
-
-% Setting ::= Type|Value|ValueSet|Object|ObjectSet
-Setting -> Type: '$1'.
-%Setting -> Value: '$1'.
-%Setting -> ValueNotNull: '$1'.
-Setting -> BuiltinValue: '$1'.
-Setting -> ValueSet: '$1'.
-%Setting -> Object: '$1'.
-%Setting -> ExternalObjectReference.
-Setting -> typereference '.' identifier.
-Setting -> identifier.
-Setting -> ObjectDefn.
-
-Setting -> ObjectSet: '$1'.
-
-
-Erlang code.
-%%-author('[email protected]').
--copyright('Copyright (c) 1991-99 Ericsson Telecom AB').
--vsn('$Revision: /main/release/1 $').
--include("asn1_records.hrl").
-
-to_set(V) when list(V) ->
- ordsets:list_to_set(V);
-to_set(V) ->
- ordsets:list_to_set([V]).
-
-merge_constraints({Rlist,ExtList}) -> % extensionmarker in constraint
- {merge_constraints(Rlist,[],[]),
- merge_constraints(ExtList,[],[])};
-
-merge_constraints(Clist) ->
- merge_constraints(Clist, [], []).
-
-merge_constraints([Ch|Ct],Cacc, Eacc) ->
- NewEacc = case Ch#constraint.e of
- undefined -> Eacc;
- E -> [E|Eacc]
- end,
- merge_constraints(Ct,[fixup_constraint(Ch#constraint.c)|Cacc],NewEacc);
-
-merge_constraints([],Cacc,[]) ->
- lists:flatten(Cacc);
-merge_constraints([],Cacc,Eacc) ->
- lists:flatten(Cacc) ++ [{'Errors',Eacc}].
-
-fixup_constraint(C) ->
- case C of
- {'SingleValue',V} when list(V) ->
- [C,
- {'ValueRange',{lists:min(V),lists:max(V)}}];
- {'PermittedAlphabet',{'SingleValue',V}} when list(V) ->
- V2 = {'SingleValue',
- ordsets:list_to_set(lists:flatten(V))},
- {'PermittedAlphabet',V2};
- {'PermittedAlphabet',{'SingleValue',V}} ->
- V2 = {'SingleValue',[V]},
- {'PermittedAlphabet',V2};
- {'SizeConstraint',Sc} ->
- {'SizeConstraint',fixup_size_constraint(Sc)};
-
- List when list(List) ->
- [fixup_constraint(Xc)||Xc <- List];
- Other ->
- Other
- end.
-
-fixup_size_constraint({'ValueRange',{Lb,Ub}}) ->
- {Lb,Ub};
-fixup_size_constraint({{'ValueRange',R},[]}) ->
- {R,[]};
-fixup_size_constraint({[],{'ValueRange',R}}) ->
- {[],R};
-fixup_size_constraint({{'ValueRange',R1},{'ValueRange',R2}}) ->
- {R1,R2};
-fixup_size_constraint({'SingleValue',[Sv]}) ->
- fixup_size_constraint({'SingleValue',Sv});
-fixup_size_constraint({'SingleValue',L}) when list(L) ->
- ordsets:list_to_set(L);
-fixup_size_constraint({'SingleValue',L}) ->
- {L,L};
-fixup_size_constraint({C1,C2}) ->
- {fixup_size_constraint(C1), fixup_size_constraint(C2)}.
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/lib/asn1/src/asn1ct_parser2.erl b/lib/asn1/src/asn1ct_parser2.erl
index 3891fce8d3..488e4af4e0 100644
--- a/lib/asn1/src/asn1ct_parser2.erl
+++ b/lib/asn1/src/asn1ct_parser2.erl
@@ -20,7 +20,7 @@
%%
-module(asn1ct_parser2).
--export([parse/1]).
+-export([parse/2,format_error/1]).
-include("asn1_records.hrl").
%% Only used internally within this module.
@@ -28,26 +28,34 @@
-record(constraint, {c,e}).
-record(identifier, {pos,val}).
-%% parse all types in module
-parse(Tokens) ->
- case catch parse_ModuleDefinition(Tokens) of
- {'EXIT',Reason} ->
- {error,{{undefined,get(asn1_module),
- [internal,error,'when',parsing,module,definition,Reason]},
- hd(Tokens)}};
- {asn1_error,Reason} ->
- {error,{Reason,hd(Tokens)}};
- {ModuleDefinition,Rest1} ->
- {Types,Rest2} = parse_AssignmentList(Rest1),
- clean_process_dictionary(),
- case Rest2 of
- [{'END',_}|_Rest3] ->
- {ok,ModuleDefinition#module{typeorval = Types}};
- _ ->
- {error,{{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'END']},
- hd(Rest2)}}
- end
+parse(File0, Tokens0) ->
+ try do_parse(Tokens0) of
+ {ok,#module{}}=Result ->
+ Result
+ catch
+ throw:{asn1_error,Fun} when is_function(Fun, 0) ->
+ handle_parse_error(File0, Fun());
+ throw:{asn1_error,{parse_error,Tokens}} ->
+ handle_parse_error(File0, Tokens)
+ after
+ clean_process_dictionary()
+ end.
+
+handle_parse_error(File0, [Token|_]) ->
+ File = filename:basename(File0),
+ Line = get_line(Token),
+ Error = {structured_error,{File,Line},?MODULE,
+ {syntax_error,get_token(Token)}},
+ {error,[Error]}.
+
+do_parse(Tokens0) ->
+ {ModuleDefinition,Tokens1} = parse_ModuleDefinition(Tokens0),
+ {Types,Tokens2} = parse_AssignmentList(Tokens1),
+ case Tokens2 of
+ [{'END',_}|_Rest3] ->
+ {ok,ModuleDefinition#module{typeorval=Types}};
+ _ ->
+ parse_error(Tokens2)
end.
clean_process_dictionary() ->
@@ -57,6 +65,11 @@ clean_process_dictionary() ->
_ = erase(extensiondefault),
ok.
+format_error({syntax_error,Token}) when is_atom(Token) ->
+ io_lib:format("syntax error before: '~s'", [Token]);
+format_error({syntax_error,Token}) ->
+ io_lib:format("syntax error before: '~p'", [Token]).
+
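+%% Illustrative sketch (not part of this module): how a caller could
+%% render the structured error returned by the new parse/2. The name
+%% print_parse_errors/1 is hypothetical; only the error term shape and
+%% format_error/1 above are given by the source.
+%%
+%%   print_parse_errors({error,Errors}) ->
+%%       lists:foreach(
+%%         fun({structured_error,{File,Line},Mod,Desc}) ->
+%%                 io:format("~ts:~p: ~ts~n",
+%%                           [File,Line,Mod:format_error(Desc)])
+%%         end, Errors).
+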
parse_ModuleDefinition([{typereference,L1,ModuleIdentifier}|Rest0]) ->
put(asn1_module,ModuleIdentifier),
{_DefinitiveIdentifier,Rest02} =
@@ -70,9 +83,7 @@ parse_ModuleDefinition([{typereference,L1,ModuleIdentifier}|Rest0]) ->
[{'DEFINITIONS',_}|Rest03] ->
Rest03;
_ ->
- throw({asn1_error,{get_line(hd(Rest02)),get(asn1_module),
- [got,get_token(hd(Rest02)),
- expected,'DEFINITIONS']}})
+ parse_error(Rest02)
end,
{TagDefault,Rest2} =
case Rest of
@@ -104,12 +115,11 @@ parse_ModuleDefinition([{typereference,L1,ModuleIdentifier}|Rest0]) ->
extensiondefault = ExtensionDefault,
exports = Exports,
imports = {imports, Imports}}, Rest6};
- _ -> throw({asn1_error,{get_line(hd(Rest3)),get(asn1_module),
- [got,get_token(hd(Rest3)),expected,"::= BEGIN"]}})
+ _ ->
+ parse_error(Rest3)
end;
parse_ModuleDefinition(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,typereference]}}).
+ parse_error(Tokens).
parse_Exports([{'EXPORTS',_L1},{';',_L2}|Rest]) ->
{{exports,[]},Rest};
@@ -122,8 +132,7 @@ parse_Exports([{'EXPORTS',_L1}|Rest]) ->
[{';',_}|Rest3] ->
{{exports,SymbolList},Rest3};
_ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,';']}})
+ parse_error(Rest2)
end;
parse_Exports(Rest) ->
{{exports,all},Rest}.
@@ -137,29 +146,25 @@ parse_SymbolList(Tokens,Acc) ->
[{',',_L1}|Rest2] ->
parse_SymbolList(Rest2,[Symbol|Acc]);
Rest2 ->
- {lists:reverse([Symbol|Acc]),Rest2}
+ {lists:reverse(Acc, [Symbol]),Rest2}
end.
parse_Symbol(Tokens) ->
parse_Reference(Tokens).
parse_Reference([{typereference,L1,TrefName},{'{',_L2},{'}',_L3}|Rest]) ->
-% {Tref,Rest};
{tref2Exttref(L1,TrefName),Rest};
parse_Reference([Tref1 = {typereference,_,_},{'.',_},Tref2 = {typereference,_,_},
{'{',_L2},{'}',_L3}|Rest]) ->
-% {{Tref1,Tref2},Rest};
{{tref2Exttref(Tref1),tref2Exttref(Tref2)},Rest};
parse_Reference([Tref = {typereference,_L1,_TrefName}|Rest]) ->
{tref2Exttref(Tref),Rest};
-parse_Reference([Vref = {identifier,_L1,_VName},{'{',_L2},{'}',_L3}|Rest]) ->
+parse_Reference([#identifier{}=Vref,{'{',_L2},{'}',_L3}|Rest]) ->
{identifier2Extvalueref(Vref),Rest};
-parse_Reference([Vref = {identifier,_L1,_VName}|Rest]) ->
+parse_Reference([#identifier{}=Vref|Rest]) ->
{identifier2Extvalueref(Vref),Rest};
parse_Reference(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- [typereference,identifier]]}}).
+ parse_error(Tokens).
parse_Imports([{'IMPORTS',_L1},{';',_L2}|Rest]) ->
{{imports,[]},Rest};
@@ -168,9 +173,8 @@ parse_Imports([{'IMPORTS',_L1}|Rest]) ->
case Rest2 of
[{';',_L2}|Rest3] ->
{{imports,SymbolsFromModuleList},Rest3};
- Rest3 ->
- throw({asn1_error,{get_line(hd(Rest3)),get(asn1_module),
- [got,get_token(hd(Rest3)),expected,';']}})
+ _ ->
+ parse_error(Rest2)
end;
parse_Imports(Tokens) ->
{{imports,[]},Tokens}.
@@ -180,11 +184,12 @@ parse_SymbolsFromModuleList(Tokens) ->
parse_SymbolsFromModuleList(Tokens,Acc) ->
{SymbolsFromModule,Rest} = parse_SymbolsFromModule(Tokens),
- case (catch parse_SymbolsFromModule(Rest)) of
+ try parse_SymbolsFromModule(Rest) of
{Sl,_Rest2} when is_record(Sl,'SymbolsFromModule') ->
- parse_SymbolsFromModuleList(Rest,[SymbolsFromModule|Acc]);
- _ ->
- {lists:reverse([SymbolsFromModule|Acc]),Rest}
+ parse_SymbolsFromModuleList(Rest, [SymbolsFromModule|Acc])
+ catch
+ throw:{asn1_error,_} ->
+ {lists:reverse(Acc, [SymbolsFromModule]),Rest}
end.
parse_SymbolsFromModule(Tokens) ->
@@ -198,169 +203,154 @@ parse_SymbolsFromModule(Tokens) ->
end,
{SymbolList,Rest} = parse_SymbolList(Tokens),
case Rest of
- [{'FROM',_L1},Tref = {typereference,_,Name},Ref={identifier,_L2,_Id},C={',',_}|Rest2] ->
- NewSymbolList = lists:map(SetRefModuleName(Name),SymbolList),
+ [{'FROM',_L1},{typereference,_,Name}=Tref|
+ [#identifier{},{',',_}|_]=Rest2] ->
+ NewSymbolList = lists:map(SetRefModuleName(Name), SymbolList),
{#'SymbolsFromModule'{symbols=NewSymbolList,
- module=tref2Exttref(Tref)},[Ref,C|Rest2]};
+ module=tref2Exttref(Tref)},Rest2};
	%% This is a special case when there is only one Symbol imported
%% from the next module. No other way to distinguish Ref from
%% a part of the GlobalModuleReference of Name.
- [{'FROM',_L1},Tref = {typereference,_,Name},Ref = {identifier,_L2,_Id},From = {'FROM',_}|Rest2] ->
- NewSymbolList = lists:map(SetRefModuleName(Name),SymbolList),
+ [{'FROM',_L1},{typereference,_,Name}=Tref|
+ [#identifier{},{'FROM',_}|_]=Rest2] ->
+ NewSymbolList = lists:map(SetRefModuleName(Name), SymbolList),
{#'SymbolsFromModule'{symbols=NewSymbolList,
- module=tref2Exttref(Tref)},[Ref,From|Rest2]};
- [{'FROM',_L1},Tref = {typereference,_,Name},{identifier,_L2,_Id}|Rest2] ->
- NewSymbolList = lists:map(SetRefModuleName(Name),SymbolList),
+ module=tref2Exttref(Tref)},Rest2};
+ [{'FROM',_L1},{typereference,_,Name}=Tref,#identifier{}|Rest2] ->
+ NewSymbolList = lists:map(SetRefModuleName(Name), SymbolList),
{#'SymbolsFromModule'{symbols=NewSymbolList,
module=tref2Exttref(Tref)},Rest2};
- [{'FROM',_L1},Tref = {typereference,_,Name},Brace = {'{',_}|Rest2] ->
- {_ObjIdVal,Rest3} = parse_ObjectIdentifierValue([Brace|Rest2]), % value not used yet, fix me
- NewSymbolList = lists:map(SetRefModuleName(Name),SymbolList),
+ [{'FROM',_L1},{typereference,_,Name}=Tref|[{'{',_}|_]=Rest2] ->
+ {_ObjIdVal,Rest3} = parse_ObjectIdentifierValue(Rest2), % value not used yet, fix me
+ NewSymbolList = lists:map(SetRefModuleName(Name), SymbolList),
{#'SymbolsFromModule'{symbols=NewSymbolList,
module=tref2Exttref(Tref)},Rest3};
- [{'FROM',_L1},Tref = {typereference,_,Name}|Rest2] ->
- NewSymbolList = lists:map(SetRefModuleName(Name),SymbolList),
+ [{'FROM',_L1},{typereference,_,Name}=Tref|Rest2] ->
+ NewSymbolList = lists:map(SetRefModuleName(Name), SymbolList),
{#'SymbolsFromModule'{symbols=NewSymbolList,
module=tref2Exttref(Tref)},Rest2};
_ ->
- throw({asn1_error,{get_line(hd(Rest)),get(asn1_module),
- [got,get_token(hd(Rest)),expected,
- ['FROM typerefernece identifier ,',
- 'FROM typereference identifier',
- 'FROM typereference {',
- 'FROM typereference']]}})
+ parse_error(Rest)
end.
parse_ObjectIdentifierValue([{'{',_}|Rest]) ->
parse_ObjectIdentifierValue(Rest,[]).
-parse_ObjectIdentifierValue([{number,_,Num}|Rest],Acc) ->
+parse_ObjectIdentifierValue([{number,_,Num}|Rest], Acc) ->
parse_ObjectIdentifierValue(Rest,[Num|Acc]);
-parse_ObjectIdentifierValue([{identifier,_,Id},{'(',_}, {number,_,Num}, {')',_}|Rest],Acc) ->
+parse_ObjectIdentifierValue([#identifier{val=Id},{'(',_},{number,_,Num},{')',_}|Rest], Acc) ->
parse_ObjectIdentifierValue(Rest,[{'NamedNumber',Id,Num}|Acc]);
-parse_ObjectIdentifierValue([{identifier,_,Id},{'(',_}, {identifier,_,Id2}, {')',_}|Rest],Acc) ->
+parse_ObjectIdentifierValue([#identifier{val=Id},{'(',_},#identifier{val=Id2},{')',_}|Rest], Acc) ->
parse_ObjectIdentifierValue(Rest,[{'NamedNumber',Id,Id2}|Acc]);
-parse_ObjectIdentifierValue([{identifier,_,Id},{'(',_}, {typereference,_,Tref},{'.',_},{identifier,_,Id2}, {')',_}|Rest],Acc) ->
- parse_ObjectIdentifierValue(Rest,[{'NamedNumber',Id,{'ExternalValue',Tref,Id2}}|Acc]);
-parse_ObjectIdentifierValue([Id = {identifier,_,_}|Rest],Acc) ->
- parse_ObjectIdentifierValue(Rest,[identifier2Extvalueref(Id)|Acc]);
-parse_ObjectIdentifierValue([{'}',_}|Rest],Acc) ->
+parse_ObjectIdentifierValue([#identifier{val=Id},{'(',_},{typereference,_,Tref},{'.',_},#identifier{val=Id2}, {')',_}|Rest], Acc) ->
+ parse_ObjectIdentifierValue(Rest, [{'NamedNumber',Id,{'ExternalValue',Tref,Id2}}|Acc]);
+parse_ObjectIdentifierValue([#identifier{}=Id|Rest], Acc) ->
+ parse_ObjectIdentifierValue(Rest, [identifier2Extvalueref(Id)|Acc]);
+parse_ObjectIdentifierValue([{'}',_}|Rest], Acc) ->
{lists:reverse(Acc),Rest};
-parse_ObjectIdentifierValue([H|_T],_Acc) ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,
- ['{ some of the following }',number,'identifier ( number )',
- 'identifier ( identifier )',
- 'identifier ( typereference.identifier)',identifier]]}}).
+parse_ObjectIdentifierValue(Tokens, _Acc) ->
+ parse_error(Tokens).
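+%% For example (illustrative, not from the original source), the object
+%% identifier value { iso(1) member-body(2) 752 } is returned by
+%% parse_ObjectIdentifierValue/1 as
+%%   [{'NamedNumber',iso,1},{'NamedNumber','member-body',2},752]
+%% together with the tokens following the closing brace.
+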
-parse_AssignmentList(Tokens = [{'END',_}|_Rest]) ->
- {[],Tokens};
-parse_AssignmentList(Tokens = [{'$end',_}|_Rest]) ->
- {[],Tokens};
parse_AssignmentList(Tokens) ->
- parse_AssignmentList(Tokens,[]).
+ parse_AssignmentList(Tokens, []).
-parse_AssignmentList(Tokens= [{'END',_}|_Rest],Acc) ->
+parse_AssignmentList([{'END',_}|_]=Tokens, Acc) ->
{lists:reverse(Acc),Tokens};
-parse_AssignmentList(Tokens= [{'$end',_}|_Rest],Acc) ->
+parse_AssignmentList([{'$end',_}|_]=Tokens, Acc) ->
{lists:reverse(Acc),Tokens};
-parse_AssignmentList(Tokens,Acc) ->
- case (catch parse_Assignment(Tokens)) of
- {'EXIT',Reason} ->
- exit(Reason);
- {asn1_error,R} ->
-% [H|T] = Tokens,
- throw({error,{R,hd(Tokens)}});
- {Assignment,Rest} ->
- parse_AssignmentList(Rest,[Assignment|Acc])
- end.
-
-parse_Assignment(Tokens) ->
- Flist = [fun parse_TypeAssignment/1,
- fun parse_ValueAssignment/1,
- fun parse_ObjectClassAssignment/1,
- fun parse_ObjectAssignment/1,
- fun parse_ObjectSetAssignment/1,
- fun parse_ParameterizedAssignment/1,
+parse_AssignmentList(Tokens0, Acc) ->
+ {Assignment,Tokens} = parse_Assignment(Tokens0),
+ parse_AssignmentList(Tokens, [Assignment|Acc]).
+
+parse_Assignment([{typereference,L1,Name},{'::=',_}|Tokens0]) ->
+ %% 1) Type ::= TypeDefinition
+ %% 2) CLASS-NAME ::= CLASS {...}
+ Flist = [{type,fun parse_Type/1},
+ {class,fun parse_ObjectClass/1}],
+ case parse_or_tag(Tokens0, Flist) of
+ {{type,Type},Tokens} ->
+ %% TypeAssignment
+ {#typedef{pos=L1,name=Name,typespec=Type},Tokens};
+ {{class,Type},Tokens} ->
+ %% ObjectClassAssignment
+ {#classdef{pos=L1,name=Name,module=resolve_module(Type),
+ typespec=Type},Tokens}
+ end;
+parse_Assignment([{typereference,_,_},{'{',_}|_]=Tokens) ->
+ %% 1) Type{...} ::= ...
+ %% 2) ValueSet{...} Type ::= ...
+ %% ObjectSet{...} CLASS-NAME ::= CLASS {...}
+ %% 3) CLASS-NAME{...} ::= CLASS {...}
+ %% A parameterized value set and a parameterized object set
+ %% cannot be distinguished from each other without type information.
+ Flist = [fun parse_ParameterizedTypeAssignment/1,
+ fun parse_ParameterizedValueSetTypeAssignment/1,
+ fun parse_ParameterizedObjectClassAssignment/1],
+ parse_or(Tokens, Flist);
+parse_Assignment([{typereference,_,_}|_]=Tokens) ->
+ %% 1) ObjectSet CLASS-NAME ::= ...
+ %% 2) ValueSet Type ::= ...
+ Flist = [fun parse_ObjectSetAssignment/1,
fun parse_ValueSetTypeAssignment/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- {asn1_assignment_error,Reason} ->
- throw({asn1_error,Reason});
- Result ->
- Result
- end.
-
+ parse_or(Tokens, Flist);
+parse_Assignment([#identifier{},{'{',_}|_]=Tokens) ->
+ %% 1) value{...} Type ::= ...
+ %% 2) object{...} CLASS-NAME ::= ...
+ Flist = [fun parse_ParameterizedValueAssignment/1,
+ fun parse_ParameterizedObjectAssignment/1],
+ parse_or(Tokens, Flist);
+parse_Assignment([#identifier{}|_]=Tokens) ->
+ %% 1) value Type ::= ...
+ %% 2) object CLASS-NAME ::= ...
+ Flist = [fun parse_ValueAssignment/1,
+ fun parse_ObjectAssignment/1],
+ parse_or(Tokens, Flist);
+parse_Assignment(Tokens) ->
+ parse_error(Tokens).
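+%% A worked example of the new dispatch (illustrative; token positions
+%% written as 1, record fields left at their defaults omitted). For the
+%% token stream of "T ::= NULL":
+%%   parse_Assignment([{typereference,1,'T'},{'::=',1},{'NULL',1},{'END',1}])
+%% takes the first clause, parse_or_tag/2 succeeds with the type parser,
+%% and the result is
+%%   {#typedef{pos=1,name='T',typespec=#type{def='NULL'}},[{'END',1}]}.
+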
parse_or(Tokens,Flist) ->
parse_or(Tokens,Flist,[]).
-parse_or(_Tokens,[],ErrList) ->
- case ErrList of
- [] ->
- throw({asn1_error,{parse_or,ErrList}});
- L when is_list(L) ->
- %% chose to throw 1) the error with the highest line no,
- %% 2) the last error which is not a asn1_assignment_error or
- %% 3) the last error.
- throw(prioritize_error(ErrList))
+parse_or(Tokens, [Fun|Funs], ErrList) when is_function(Fun, 1) ->
+ try Fun(Tokens) of
+ {_,Rest}=Result when is_list(Rest) ->
+ Result
+ catch
+ throw:{asn1_error,Error} ->
+ parse_or(Tokens, Funs, [Error|ErrList])
end;
-parse_or(Tokens,[Fun|Frest],ErrList) ->
- case (catch Fun(Tokens)) of
- Exit = {'EXIT',_Reason} ->
- parse_or(Tokens,Frest,[Exit|ErrList]);
- AsnErr = {asn1_error,_} ->
- parse_or(Tokens,Frest,[AsnErr|ErrList]);
- AsnAssErr = {asn1_assignment_error,_} ->
- parse_or(Tokens,Frest,[AsnAssErr|ErrList]);
- Result = {_,L} when is_list(L) ->
- Result;
- Error ->
- parse_or(Tokens,Frest,[Error|ErrList])
- end.
-
-parse_or_tag(Tokens,Flist) ->
- parse_or_tag(Tokens,Flist,[]).
-
-parse_or_tag(_Tokens,[],ErrList) ->
- case ErrList of
- [] ->
- throw({asn1_error,{parse_or_tag,ErrList}});
- L when is_list(L) ->
- %% chose to throw 1) the error with the highest line no,
- %% 2) the last error which is not a asn1_assignment_error or
- %% 3) the last error.
- throw(prioritize_error(ErrList))
+parse_or(_Tokens, [], ErrList) ->
+ throw({asn1_error,fun() -> prioritize_error(ErrList) end}).
+
+parse_or_tag(Tokens, Flist) ->
+ parse_or_tag(Tokens, Flist, []).
+
+parse_or_tag(Tokens, [{Tag,Fun}|Funs], ErrList) when is_function(Fun, 1) ->
+ try Fun(Tokens) of
+ {Parsed,Rest} when is_list(Rest) ->
+ {{Tag,Parsed},Rest}
+ catch
+ throw:{asn1_error,Error} ->
+ parse_or_tag(Tokens, Funs, [Error|ErrList])
end;
-parse_or_tag(Tokens,[{Tag,Fun}|Frest],ErrList) when is_function(Fun) ->
- case (catch Fun(Tokens)) of
- Exit = {'EXIT',_Reason} ->
- parse_or_tag(Tokens,Frest,[Exit|ErrList]);
- AsnErr = {asn1_error,_} ->
- parse_or_tag(Tokens,Frest,[AsnErr|ErrList]);
- AsnAssErr = {asn1_assignment_error,_} ->
- parse_or_tag(Tokens,Frest,[AsnAssErr|ErrList]);
- {ParseRes,Rest} when is_list(Rest) ->
- {{Tag,ParseRes},Rest};
- Error ->
- parse_or_tag(Tokens,Frest,[Error|ErrList])
- end.
+parse_or_tag(_Tokens, [], ErrList) ->
+ throw({asn1_error,fun() -> prioritize_error(ErrList) end}).
+
+prioritize_error(Errors0) ->
+ Errors1 = prioritize_error_1(Errors0),
+ Errors2 = [{length(L),L} || L <- Errors1],
+ Errors = lists:sort(Errors2),
+ [Res|_] = [L || {_,L} <- Errors],
+ Res.
+
+prioritize_error_1([F|T]) when is_function(F, 0) ->
+ [F()|prioritize_error_1(T)];
+prioritize_error_1([{parse_error,Tokens}|T]) ->
+ [Tokens|prioritize_error_1(T)];
+prioritize_error_1([]) ->
+ [].
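+%% Illustration (not from the source; tokens abbreviated to atoms): the
+%% reported error is the alternative that consumed the most input, i.e.
+%% the one with the fewest tokens left when it failed:
+%%   prioritize_error([{parse_error,[a,b,c]},{parse_error,[x]}])
+%% evaluates to [x].
+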
-parse_TypeAssignment([{typereference,L1,Tref},{'::=',_}|Rest]) ->
- {Type,Rest2} = parse_Type(Rest),
- {#typedef{pos=L1,name=Tref,typespec=Type},Rest2};
-parse_TypeAssignment([H1,H2|_Rest]) ->
- throw({asn1_assignment_error,{get_line(H1),get(asn1_module),
- [got,[get_token(H1),get_token(H2)], expected,
- typereference,'::=']}});
-parse_TypeAssignment([H|_T]) ->
- throw({asn1_assignment_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,
- typereference]}}).
%% parse_Type(Tokens) -> Ret
%%
@@ -370,9 +360,8 @@ parse_TypeAssignment([H|_T]) ->
%%
parse_Type(Tokens) ->
{Tag,Rest3} = case Tokens of
- [Lbr= {'[',_}|Rest] ->
- parse_Tag([Lbr|Rest]);
- Rest-> {[],Rest}
+ [{'[',_}|_] -> parse_Tag(Tokens);
+ _ -> {[],Tokens}
end,
{Tag2,Rest4} = case Rest3 of
[{'IMPLICIT',_}|Rest31] when is_record(Tag,tag)->
@@ -384,31 +373,17 @@ parse_Type(Tokens) ->
Rest31 ->
{Tag,Rest31}
end,
- Flist = [fun parse_BuiltinType/1,fun parse_ReferencedType/1,fun parse_TypeWithConstraint/1],
- {Type,Rest5} = case (catch parse_or(Rest4,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_Reason} ->
- throw(AsnErr);
- Result ->
- Result
- end,
- case hd(Rest5) of
- {'(',_} ->
+ Flist = [fun parse_BuiltinType/1,
+ fun parse_ReferencedType/1,
+ fun parse_TypeWithConstraint/1],
+ {Type,Rest5} = parse_or(Rest4, Flist),
+ case Rest5 of
+ [{'(',_}|_] ->
{Constraints,Rest6} = parse_Constraints(Rest5),
- if is_record(Type,type) ->
- {Type#type{constraint=merge_constraints(Constraints),
- tag=Tag2},Rest6};
- true ->
- {#type{def=Type,constraint=merge_constraints(Constraints),
- tag=Tag2},Rest6}
- end;
- _ ->
- if is_record(Type,type) ->
- {Type#type{tag=Tag2},Rest5};
- true ->
- {#type{def=Type,tag=Tag2},Rest5}
- end
+ {Type#type{tag=Tag2,
+ constraint=merge_constraints(Constraints)},Rest6};
+ [_|_] ->
+ {Type#type{tag=Tag2},Rest5}
end.
parse_BuiltinType([{'BIT',_},{'STRING',_}|Rest]) ->
@@ -419,11 +394,10 @@ parse_BuiltinType([{'BIT',_},{'STRING',_}|Rest]) ->
[{'}',_}|Rest4] ->
{#type{def={'BIT STRING',NamedNumberList}},Rest4};
_ ->
- throw({asn1_error,{get_line(hd(Rest3)),get(asn1_module),
- [got,get_token(hd(Rest3)),expected,'}']}})
+ parse_error(Rest3)
end;
_ ->
- {{'BIT STRING',[]},Rest}
+ {#type{def={'BIT STRING',[]}},Rest}
end;
parse_BuiltinType([{'BOOLEAN',_}|Rest]) ->
{#type{def='BOOLEAN'},Rest};
@@ -435,41 +409,33 @@ parse_BuiltinType([{'CHARACTER',_},{'STRING',_}|Rest]) ->
{#type{def='CHARACTER STRING'},Rest};
parse_BuiltinType([{'CHOICE',_},{'{',_}|Rest]) ->
- {AlternativeTypeLists,Rest2} = parse_AlternativeTypeLists(Rest),
- AlternativeTypeLists1 =
- lists:filter(fun(#'ExtensionAdditionGroup'{}) -> false;
- ('ExtensionAdditionGroupEnd') -> false;
- (_) -> true
- end,AlternativeTypeLists),
+ {L0,Rest2} = parse_AlternativeTypeLists(Rest),
case Rest2 of
[{'}',_}|Rest3] ->
- AlternativeTypeLists2 =
- case {[Ext||Ext = #'EXTENSIONMARK'{} <- AlternativeTypeLists1],
- get(extensiondefault)} of
- {[],'IMPLIED'} -> AlternativeTypeLists1 ++ [#'EXTENSIONMARK'{}];
- _ -> AlternativeTypeLists1
+ NeedExt = not lists:keymember('EXTENSIONMARK', 1, L0) andalso
+ get(extensiondefault) =:= 'IMPLIED',
+ L = case NeedExt of
+ true ->
+ L0 ++ [#'EXTENSIONMARK'{}];
+ false ->
+ L0
end,
-
- {#type{def={'CHOICE',AlternativeTypeLists2}},Rest3};
+ {#type{def={'CHOICE',L}},Rest3};
_ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'}']}})
+ parse_error(Rest2)
end;
parse_BuiltinType([{'EMBEDDED',_},{'PDV',_}|Rest]) ->
{#type{def='EMBEDDED PDV'},Rest};
parse_BuiltinType([{'ENUMERATED',_},{'{',_}|Rest]) ->
- {Enumerations,Rest2} = parse_Enumerations(Rest,get(extensiondefault)),
+ {Enumerations,Rest2} = parse_Enumerations(Rest),
case Rest2 of
[{'}',_}|Rest3] ->
{#type{def={'ENUMERATED',Enumerations}},Rest3};
_ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'}']}})
+ parse_error(Rest2)
end;
parse_BuiltinType([{'EXTERNAL',_}|Rest]) ->
{#type{def='EXTERNAL'},Rest};
-
-% InstanceOfType
parse_BuiltinType([{'INSTANCE',_},{'OF',_}|Rest]) ->
{DefinedObjectClass,Rest2} = parse_DefinedObjectClass(Rest),
case Rest2 of
@@ -480,9 +446,6 @@ parse_BuiltinType([{'INSTANCE',_},{'OF',_}|Rest]) ->
_ ->
{#type{def={'INSTANCE OF',DefinedObjectClass,[]}},Rest2}
end;
-
-% parse_BuiltinType(Tokens) ->
-
parse_BuiltinType([{'INTEGER',_}|Rest]) ->
case Rest of
[{'{',_}|Rest2] ->
@@ -491,17 +454,13 @@ parse_BuiltinType([{'INTEGER',_}|Rest]) ->
[{'}',_}|Rest4] ->
{#type{def={'INTEGER',NamedNumberList}},Rest4};
_ ->
- throw({asn1_error,{get_line(hd(Rest3)),get(asn1_module),
- [got,get_token(hd(Rest3)),expected,'}']}})
+ parse_error(Rest3)
end;
_ ->
{#type{def='INTEGER'},Rest}
end;
parse_BuiltinType([{'NULL',_}|Rest]) ->
{#type{def='NULL'},Rest};
-
-% ObjectClassFieldType fix me later
-
parse_BuiltinType([{'OBJECT',_},{'IDENTIFIER',_}|Rest]) ->
{#type{def='OBJECT IDENTIFIER'},Rest};
parse_BuiltinType([{'OCTET',_},{'STRING',_}|Rest]) ->
@@ -529,18 +488,14 @@ parse_BuiltinType([{'SEQUENCE',_},{'{',_},{'...',Line},{'!',_}|Rest]) ->
parse_ComponentTypeLists2(Rest2,[#'EXTENSIONMARK'{pos=Line}]),
case Rest3 of
[{'}',_}|Rest4] ->
- {#type{def=#'SEQUENCE'{components=ComponentTypeLists}},Rest4};
+ {#type{def=#'SEQUENCE'{components=ComponentTypeLists}},Rest4};
_ ->
- throw({asn1_error,{get_line(hd(Rest3)),get(asn1_module),
- [got,get_token(hd(Rest3)),expected,'}']}})
+ parse_error(Rest3)
end
-% _ -> % Seq case 4,17-19,23-26 will fail here
-% throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
-% [got,get_token(hd(Rest2)),expected,'}']}})
end;
parse_BuiltinType([{'SEQUENCE',_},{'{',_}|Rest]) ->
{ComponentTypeLists,Rest2} = parse_ComponentTypeLists(Rest),
- case Rest2 of
+ case Rest2 of
[{'}',_}|Rest3] ->
ComponentTypeLists2 =
case {[Ext||Ext = #'EXTENSIONMARK'{} <- ComponentTypeLists],
@@ -551,25 +506,19 @@ parse_BuiltinType([{'SEQUENCE',_},{'{',_}|Rest]) ->
{#type{def=#'SEQUENCE'{components = ComponentTypeLists2}},
Rest3};
_ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'}']}})
+ parse_error(Rest2)
end;
-
-parse_BuiltinType([{'SEQUENCE',_},{'OF',_},Id={identifier,_,_},Lt={'<',_}|Rest]) ->
-%% TODO: take care of the identifier for something useful
- {Type,Rest2} = parse_SelectionType([Id,Lt|Rest]),
- {#type{def={'SEQUENCE OF',#type{def=Type,tag=[]}}},Rest2};
-
-parse_BuiltinType([{'SEQUENCE',_},{'OF',_},{identifier,_,_} |Rest]) ->
+parse_BuiltinType([{'SEQUENCE',_},{'OF',_}|
+ [#identifier{},{'<',_}|_]=Tokens0]) ->
+ {Type,Tokens} = parse_SelectionType(Tokens0),
+ {#type{def={'SEQUENCE OF',Type}},Tokens};
+parse_BuiltinType([{'SEQUENCE',_},{'OF',_},#identifier{} |Rest]) ->
%% TODO: take care of the identifier for something useful
{Type,Rest2} = parse_Type(Rest),
{#type{def={'SEQUENCE OF',Type}},Rest2};
-
parse_BuiltinType([{'SEQUENCE',_},{'OF',_}|Rest]) ->
{Type,Rest2} = parse_Type(Rest),
{#type{def={'SEQUENCE OF',Type}},Rest2};
-
-
parse_BuiltinType([{'SET',_},{'{',_},{'...',Line},{'}',_}|Rest]) ->
{#type{def=#'SET'{components=[#'EXTENSIONMARK'{pos = Line}]}},Rest};
parse_BuiltinType([{'SET',_},{'{',_},{'...',Line},{'!',_}|Rest]) ->
@@ -581,12 +530,18 @@ parse_BuiltinType([{'SET',_},{'{',_},{'...',Line},{'!',_}|Rest]) ->
val = ExceptionIdentification}]}},
Rest3};
_ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'}']}})
+ {ComponentTypeLists,Rest3}=
+ parse_ComponentTypeLists2(Rest2,[#'EXTENSIONMARK'{pos=Line}]),
+ case Rest3 of
+ [{'}',_}|Rest4] ->
+ {#type{def=#'SET'{components=ComponentTypeLists}},Rest4};
+ _ ->
+ parse_error(Rest3)
+ end
end;
parse_BuiltinType([{'SET',_},{'{',_}|Rest]) ->
{ComponentTypeLists,Rest2} = parse_ComponentTypeLists(Rest),
- case Rest2 of
+ case Rest2 of
[{'}',_}|Rest3] ->
ComponentTypeLists2 =
case {[Ext||Ext = #'EXTENSIONMARK'{} <- ComponentTypeLists],
@@ -597,184 +552,128 @@ parse_BuiltinType([{'SET',_},{'{',_}|Rest]) ->
{#type{def=#'SET'{components = ComponentTypeLists2}},
Rest3};
_ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'}']}})
+ parse_error(Rest2)
end;
-
-parse_BuiltinType([{'SET',_},{'OF',_},Id={identifier,_,_},Lt={'<',_}|Rest]) ->
-%% TODO: take care of the identifier for something useful
- {Type,Rest2} = parse_SelectionType([Id,Lt|Rest]),
- {#type{def={'SET OF',#type{def=Type,tag=[]}}},Rest2};
-
-
-parse_BuiltinType([{'SET',_},{'OF',_},{identifier,_,_}|Rest]) ->
+parse_BuiltinType([{'SET',_},{'OF',_}|
+ [#identifier{},{'<',_}|_]=Tokens0]) ->
+ {Type,Tokens} = parse_SelectionType(Tokens0),
+ {#type{def={'SET OF',Type}},Tokens};
+parse_BuiltinType([{'SET',_},{'OF',_},#identifier{}|Rest]) ->
%%TODO: take care of the identifier for something useful
{Type,Rest2} = parse_Type(Rest),
{#type{def={'SET OF',Type}},Rest2};
-
parse_BuiltinType([{'SET',_},{'OF',_}|Rest]) ->
{Type,Rest2} = parse_Type(Rest),
{#type{def={'SET OF',Type}},Rest2};
-
-%% The so called Useful types
parse_BuiltinType([{'GeneralizedTime',_}|Rest]) ->
{#type{def='GeneralizedTime'},Rest};
parse_BuiltinType([{'UTCTime',_}|Rest]) ->
{#type{def='UTCTime'},Rest};
parse_BuiltinType([{'ObjectDescriptor',_}|Rest]) ->
{#type{def='ObjectDescriptor'},Rest};
-
-%% For compatibility with old standard
-parse_BuiltinType([{'ANY',_},{'DEFINED',_},{'BY',_},{identifier,_,Id}|Rest]) ->
+parse_BuiltinType([{'ANY',_},{'DEFINED',_},{'BY',_},#identifier{val=Id}|Rest]) ->
+ %% For compatibility with the old standard.
{#type{def={'ANY_DEFINED_BY',Id}},Rest};
parse_BuiltinType([{'ANY',_}|Rest]) ->
+ %% For compatibility with the old standard.
{#type{def='ANY'},Rest};
-
parse_BuiltinType(Tokens) ->
parse_ObjectClassFieldType(Tokens).
-% throw({asn1_error,unhandled_type}).
-parse_TypeWithConstraint([{'SEQUENCE',_},Lpar = {'(',_}|Rest]) ->
- {Constraint,Rest2} = parse_Constraint([Lpar|Rest]),
+parse_TypeWithConstraint([{'SEQUENCE',_}|[{'(',_}|_]=Rest0]) ->
+ {Constraint,Rest2} = parse_Constraint(Rest0),
Rest4 = case Rest2 of
- [{'OF',_}, {identifier,_,_Id}|Rest3] ->
+ [{'OF',_},#identifier{}|Rest3] ->
%%% TODO: make some use of the identifier, maybe useful in the XML mapping
Rest3;
[{'OF',_}|Rest3] ->
Rest3;
_ ->
- throw({asn1_error,
- {get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'OF']}})
+ parse_error(Rest2)
end,
{Type,Rest5} = parse_Type(Rest4),
{#type{def = {'SEQUENCE OF',Type},
constraint = merge_constraints([Constraint])},Rest5};
-parse_TypeWithConstraint([{'SEQUENCE',_},{'SIZE',_},Lpar = {'(',_}|Rest]) ->
- {Constraint,Rest2} = parse_Constraint([Lpar|Rest]),
+parse_TypeWithConstraint([{'SEQUENCE',_},{'SIZE',_}|[{'(',_}|_]=Rest0]) ->
+ {Constraint,Rest2} = parse_Constraint(Rest0),
#constraint{c=C} = Constraint,
- Constraint2 = Constraint#constraint{c={'SizeConstraint',C}},
+ Constraint2 = Constraint#constraint{c={element_set,{'SizeConstraint',C},
+ none}},
Rest4 = case Rest2 of
- [{'OF',_}, {identifier,_,_Id}|Rest3] ->
+ [{'OF',_},#identifier{}|Rest3] ->
%%% TODO: make some use of the identifier, maybe useful in the XML mapping
Rest3;
[{'OF',_}|Rest3] ->
Rest3;
_ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'OF']}})
+ parse_error(Rest2)
end,
{Type,Rest5} = parse_Type(Rest4),
{#type{def = {'SEQUENCE OF',Type}, constraint = merge_constraints([Constraint2])},Rest5};
-parse_TypeWithConstraint([{'SET',_},Lpar = {'(',_}|Rest]) ->
- {Constraint,Rest2} = parse_Constraint([Lpar|Rest]),
+parse_TypeWithConstraint([{'SET',_}|[{'(',_}|_]=Rest0]) ->
+ {Constraint,Rest2} = parse_Constraint(Rest0),
Rest4 = case Rest2 of
- [{'OF',_}, {identifier,_,_Id}|Rest3] ->
+ [{'OF',_},#identifier{}|Rest3] ->
%%% TODO: make some use of the identifier, maybe useful in the XML mapping
Rest3;
[{'OF',_}|Rest3] ->
Rest3;
_ ->
- throw({asn1_error,
- {get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'OF']}})
+ parse_error(Rest2)
end,
{Type,Rest5} = parse_Type(Rest4),
{#type{def = {'SET OF',Type},
constraint = merge_constraints([Constraint])},Rest5};
-parse_TypeWithConstraint([{'SET',_},{'SIZE',_},Lpar = {'(',_}|Rest]) ->
- {Constraint,Rest2} = parse_Constraint([Lpar|Rest]),
+parse_TypeWithConstraint([{'SET',_},{'SIZE',_}|[{'(',_}|_]=Rest0]) ->
+ {Constraint,Rest2} = parse_Constraint(Rest0),
#constraint{c=C} = Constraint,
- Constraint2 = Constraint#constraint{c={'SizeConstraint',C}},
+ Constraint2 = Constraint#constraint{c={element_set,
+ {'SizeConstraint',C},none}},
Rest4 = case Rest2 of
- [{'OF',_}, {identifier,_,_Id}|Rest3] ->
+ [{'OF',_},#identifier{}|Rest3] ->
%%% TODO: make some use of the identifier, maybe useful in the XML mapping
Rest3;
[{'OF',_}|Rest3] ->
Rest3;
_ ->
- throw({asn1_error,
- {get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'OF']}})
+ parse_error(Rest2)
end,
{Type,Rest5} = parse_Type(Rest4),
{#type{def = {'SET OF',Type},
constraint = merge_constraints([Constraint2])},Rest5};
parse_TypeWithConstraint(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- ['SEQUENCE','SEQUENCE SIZE','SET','SET SIZE'],
- followed,by,a,constraint]}}).
+ parse_error(Tokens).
%% --------------------------
parse_ReferencedType(Tokens) ->
- Flist = [fun parse_DefinedType/1,
+ Flist = [fun parse_ParameterizedType/1,
+ fun parse_DefinedType/1,
fun parse_SelectionType/1,
- fun parse_TypeFromObject/1,
- fun parse_ValueSetFromObjects/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
+ fun parse_TypeFromObject/1],
+ parse_or(Tokens, Flist).
-parse_DefinedType(Tokens=[{typereference,_,_},{'{',_}|_Rest]) ->
- parse_ParameterizedType(Tokens);
-parse_DefinedType(Tokens=[{typereference,L1,TypeName},
- T2={typereference,_,_},T3={'{',_}|Rest]) ->
- case (catch parse_ParameterizedType(Tokens)) of
- {'EXIT',_Reason} ->
- Rest2 = [T2,T3|Rest],
- {#type{def = #'Externaltypereference'{pos=L1,
- module=resolve_module(TypeName),
- type=TypeName}},Rest2};
- {asn1_error,_} ->
- Rest2 = [T2,T3|Rest],
- {#type{def = #'Externaltypereference'{pos=L1,
- module=resolve_module(TypeName),
- type=TypeName}},Rest2};
- Result ->
- Result
- end;
-parse_DefinedType(Tokens=[{typereference,_L1,_Module},{'.',_},
- {typereference,_,_TypeName},{'{',_}|_Rest]) ->
- parse_ParameterizedType(Tokens);
-parse_DefinedType([{typereference,L1,Module},{'.',_},{typereference,_,TypeName}|Rest]) ->
- {#type{def = #'Externaltypereference'{pos=L1,module=Module,type=TypeName}},Rest};
-parse_DefinedType([{typereference,L1,TypeName}|Rest]) ->
- case is_pre_defined_class(TypeName) of
- false ->
- {#type{def = #'Externaltypereference'{pos=L1,module=resolve_module(TypeName),
- type=TypeName}},Rest};
- _ ->
- throw({asn1_error,
- {L1,get(asn1_module),
- [got,TypeName,expected,
- [typereference,'typereference.typereference',
- 'typereference typereference']]}})
- end;
+parse_DefinedType([{typereference,L1,Module},
+ {'.',_},
+ {typereference,_,TypeName}|Tokens]) ->
+ {#type{def = #'Externaltypereference'{pos=L1,module=Module,
+ type=TypeName}},Tokens};
+parse_DefinedType([{typereference,_,_}=Tr|Tokens]) ->
+ {#type{def=tref2Exttref(Tr)},Tokens};
parse_DefinedType(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- [typereference,'typereference.typereference',
- 'typereference typereference']]}}).
+ parse_error(Tokens).
-parse_SelectionType([{identifier,_,Name},{'<',_}|Rest]) ->
+parse_SelectionType([#identifier{val=Name},{'<',_}|Rest]) ->
{Type,Rest2} = parse_Type(Rest),
- {{'SelectionType',Name,Type},Rest2};
+ {#type{def={'SelectionType',Name,Type}},Rest2};
parse_SelectionType(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,'identifier <']}}).
+ parse_error(Tokens).
resolve_module(Type) ->
@@ -787,30 +686,13 @@ resolve_module(_Type, Current, undefined) ->
resolve_module(Type, Current, Imports) ->
case [Mod || #'SymbolsFromModule'{symbols = S, module = Mod} <- Imports,
#'Externaltypereference'{type = T} <- S,
- Type == T] of
+ Type =:= T] of
[#'Externaltypereference'{type = Mod}|_] -> Mod;
%% This allows the same symbol to be imported several times
%% which ought to be checked elsewhere and flagged as an error
[] -> Current
end.
-%% --------------------------
-
-
-%% This should probably be removed very soon
-% parse_ConstrainedType(Tokens) ->
-% case (catch parse_TypeWithConstraint(Tokens)) of
-% {'EXIT',Reason} ->
-% {Type,Rest} = parse_Type(Tokens),
-% {Constraint,Rest2} = parse_Constraint(Rest),
-% {Type#type{constraint=Constraint},Rest2};
-% {asn1_error,Reason2} ->
-% {Type,Rest} = parse_Type(Tokens),
-% {Constraint,Rest2} = parse_Constraint(Rest),
-% {Type#type{constraint=Constraint},Rest2};
-% Result ->
-% Result
-% end.
parse_Constraints(Tokens) ->
parse_Constraints(Tokens,[]).
@@ -819,9 +701,9 @@ parse_Constraints(Tokens,Acc) ->
{Constraint,Rest} = parse_Constraint(Tokens),
case Rest of
[{'(',_}|_Rest2] ->
- parse_Constraints(Rest,[Constraint|Acc]);
+ parse_Constraints(Rest, [Constraint|Acc]);
_ ->
- {lists:reverse([Constraint|Acc]),Rest}
+ {lists:reverse(Acc, [Constraint]),Rest}
end.
parse_Constraint([{'(',_}|Rest]) ->
@@ -830,46 +712,27 @@ parse_Constraint([{'(',_}|Rest]) ->
case Rest3 of
[{')',_}|Rest4] ->
{#constraint{c=Constraint,e=Exception},Rest4};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,')']}})
- end;
-parse_Constraint(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,'(']}}).
+ [_|_] ->
+ parse_error(Rest3)
+ end.
parse_ConstraintSpec(Tokens) ->
Flist = [fun parse_GeneralConstraint/1,
fun parse_SubtypeConstraint/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- {asn1_error,Reason2} ->
- throw({asn1_error,Reason2});
- Result ->
- Result
- end.
+ parse_or(Tokens, Flist).
parse_ExceptionSpec([LPar={')',_}|Rest]) ->
{undefined,[LPar|Rest]};
parse_ExceptionSpec([{'!',_}|Rest]) ->
parse_ExceptionIdentification(Rest);
parse_ExceptionSpec(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,[')','!']]}}).
+ parse_error(Tokens).
parse_ExceptionIdentification(Tokens) ->
Flist = [fun parse_SignedNumber/1,
fun parse_DefinedValue/1,
fun parse_TypeColonValue/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- {asn1_error,Reason2} ->
- throw({asn1_error,Reason2});
- Result ->
- Result
- end.
+ parse_or(Tokens, Flist).
parse_TypeColonValue(Tokens) ->
{Type,Rest} = parse_Type(Tokens),
@@ -877,32 +740,28 @@ parse_TypeColonValue(Tokens) ->
[{':',_}|Rest2] ->
{Value,Rest3} = parse_Value(Rest2),
{{Type,Value},Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,':']}})
+ [_|_] ->
+ parse_error(Rest)
end.
parse_SubtypeConstraint(Tokens) ->
parse_ElementSetSpecs(Tokens).
-parse_ElementSetSpecs([{'...',_}|Rest]) ->
- {Elements,Rest2} = parse_ElementSetSpec(Rest),
- {{[],Elements},Rest2};
parse_ElementSetSpecs(Tokens) ->
{RootElems,Rest} = parse_ElementSetSpec(Tokens),
case Rest of
[{',',_},{'...',_},{',',_}|Rest2] ->
{AdditionalElems,Rest3} = parse_ElementSetSpec(Rest2),
- {{RootElems,AdditionalElems},Rest3};
+ {{element_set,RootElems,AdditionalElems},Rest3};
[{',',_},{'...',_}|Rest2] ->
- {{RootElems,[]},Rest2};
+ {{element_set,RootElems,empty},Rest2};
_ ->
- {RootElems,Rest}
+ {{element_set,RootElems,none},Rest}
end.
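+%% Shape of the new constraint representation (illustrative only; A, B
+%% and C stand for whatever parse_Elements/1 returns, and the outer
+%% parentheses are consumed by parse_Constraint/1):
+%%   "(A | B | C)"   -> {element_set,{union,A,{union,B,C}},none}
+%%   "(A ^ B)"       -> {element_set,{intersection,A,B},none}
+%%   "(A, ..., B)"   -> {element_set,A,B}
+%%   "(A, ...)"      -> {element_set,A,empty}
+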
parse_ElementSetSpec([{'ALL',_},{'EXCEPT',_}|Rest]) ->
{Exclusions,Rest2} = parse_Elements(Rest),
- {{'ALL',{'EXCEPT',Exclusions}},Rest2};
+ {{'ALL-EXCEPT',Exclusions},Rest2};
parse_ElementSetSpec(Tokens) ->
parse_Unions(Tokens).
@@ -918,14 +777,8 @@ parse_Unions(Tokens) ->
case {InterSec,Unions} of
{InterSec,[]} ->
{InterSec,Rest2};
- {{'SingleValue',V1},{'SingleValue',V2}} ->
- {{'SingleValue',ordsets:union(to_set(V1),to_set(V2))},Rest2};
- {V1,V2} when is_list(V2) ->
- {[V1] ++ [union|V2],Rest2};
{V1,V2} ->
- {[V1,union,V2],Rest2}
-% Other ->
-% throw(Other)
+ {{union,V1,V2},Rest2}
end.
parse_UnionsRec([{'|',_}|Rest]) ->
@@ -934,12 +787,8 @@ parse_UnionsRec([{'|',_}|Rest]) ->
case {InterSec,URec} of
{V1,[]} ->
{V1,Rest3};
- {{'SingleValue',V1},{'SingleValue',V2}} ->
- {{'SingleValue',ordsets:union(to_set(V1),to_set(V2))},Rest3};
- {V1,V2} when is_list(V2) ->
- {[V1] ++ [union|V2],Rest3};
{V1,V2} ->
- {[V1,union,V2],Rest3}
+ {{union,V1,V2},Rest3}
end;
parse_UnionsRec([{'UNION',Info}|Rest]) ->
parse_UnionsRec([{'|',Info}|Rest]);
@@ -952,13 +801,8 @@ parse_Intersections(Tokens) ->
case {InterSec,IRec} of
{V1,[]} ->
{V1,Rest2};
- {{'SingleValue',V1},{'SingleValue',V2}} ->
- {{'SingleValue',
- ordsets:intersection(to_set(V1),to_set(V2))},Rest2};
- {V1,V2} when is_list(V2) ->
- {[V1] ++ [intersection|V2],Rest2};
{V1,V2} ->
- {[V1,intersection,V2],Rest2}
+ {{intersection,V1,V2},Rest2}
end.
%% parse_IElemsRec(Tokens) -> Result
@@ -967,15 +811,10 @@ parse_IElemsRec([{'^',_}|Rest]) ->
{InterSec,Rest2} = parse_IntersectionElements(Rest),
{IRec,Rest3} = parse_IElemsRec(Rest2),
case {InterSec,IRec} of
- {{'SingleValue',V1},{'SingleValue',V2}} ->
- {{'SingleValue',
- ordsets:intersection(to_set(V1),to_set(V2))},Rest3};
{V1,[]} ->
- {V1,Rest3};
- {V1,V2} when is_list(V2) ->
- {[V1] ++ [intersection|V2],Rest3};
+ {V1,Rest2};
{V1,V2} ->
- {[V1,intersection,V2],Rest3}
+ {{intersection,V1,V2},Rest3}
end;
parse_IElemsRec([{'INTERSECTION',Info}|Rest]) ->
parse_IElemsRec([{'^',Info}|Rest]);
@@ -992,7 +831,7 @@ parse_IntersectionElements(Tokens) ->
case Rest of
[{'EXCEPT',_}|Rest2] ->
{Exclusion,Rest3} = parse_Elements(Rest2),
- {{InterSec,{'EXCEPT',Exclusion}},Rest3};
+ {{'EXCEPT',InterSec,Exclusion},Rest3};
Rest ->
{InterSec,Rest}
end.
@@ -1006,102 +845,73 @@ parse_Elements([{'(',_}|Rest]) ->
case Rest2 of
[{')',_}|Rest3] ->
{Elems,Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,')']}})
+ [_|_] ->
+ parse_error(Rest2)
end;
parse_Elements(Tokens) ->
Flist = [fun parse_ObjectSetElements/1,
fun parse_SubtypeElements/1,
-% fun parse_Value/1,
-% fun parse_Type/1,
fun parse_Object/1,
fun parse_DefinedObjectSet/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- Err = {asn1_error,_} ->
- throw(Err);
- Result = {Val,_} when is_record(Val,type) ->
- Result;
-
- Result ->
- Result
- end.
-
-
+ parse_or(Tokens, Flist).
%% --------------------------
-parse_DefinedObjectClass([{typereference,_,_ModName},{'.',_},Tr={typereference,_,_ObjClName}|Rest]) ->
-%% {{objectclassname,ModName,ObjClName},Rest};
-% {{objectclassname,tref2Exttref(Tr)},Rest};
- {tref2Exttref(Tr),Rest};
+parse_DefinedObjectClass([{typereference,_,ModName},{'.',_},
+ {typereference,Pos,Name}|Tokens]) ->
+ Ext = #'Externaltypereference'{pos=Pos,
+ module=ModName,
+ type=Name},
+ {Ext,Tokens};
parse_DefinedObjectClass([Tr={typereference,_,_ObjClName}|Rest]) ->
-% {{objectclassname,tref2Exttref(Tr)},Rest};
{tref2Exttref(Tr),Rest};
parse_DefinedObjectClass(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- ['typereference . typereference',
- typereference,
- 'TYPE-IDENTIFIER',
- 'ABSTRACT-SYNTAX']]}}).
-
-parse_ObjectClassAssignment([{typereference,L1,ObjClName},{'::=',_}|Rest]) ->
- {Type,Rest2} = parse_ObjectClass(Rest),
- {#classdef{pos=L1,name=ObjClName,module=resolve_module(Type),
- typespec=Type},Rest2};
-parse_ObjectClassAssignment(Tokens) ->
- throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- 'typereference ::=']}}).
+ parse_error(Tokens).
parse_ObjectClass(Tokens) ->
- Flist = [fun parse_DefinedObjectClass/1,
- fun parse_ObjectClassDefn/1,
- fun parse_ParameterizedObjectClass/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- {asn1_error,Reason2} ->
- throw({asn1_error,Reason2});
- Result ->
- Result
- end.
+ Flist = [fun parse_ObjectClassDefn/1,
+ fun parse_DefinedObjectClass/1],
+ parse_or(Tokens, Flist).
parse_ObjectClassDefn([{'CLASS',_},{'{',_}|Rest]) ->
{Type,Rest2} = parse_FieldSpec(Rest),
{WithSyntaxSpec,Rest3} = parse_WithSyntaxSpec(Rest2),
{#objectclass{fields=Type,syntax=WithSyntaxSpec},Rest3};
parse_ObjectClassDefn(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,'CLASS {']}}).
+ parse_error(Tokens).
parse_FieldSpec(Tokens) ->
parse_FieldSpec(Tokens,[]).
-parse_FieldSpec(Tokens,Acc) ->
- Flist = [fun parse_FixedTypeValueFieldSpec/1,
- fun parse_VariableTypeValueFieldSpec/1,
- fun parse_ObjectFieldSpec/1,
- fun parse_FixedTypeValueSetFieldSpec/1,
- fun parse_VariableTypeValueSetFieldSpec/1,
- fun parse_TypeFieldSpec/1,
- fun parse_ObjectSetFieldSpec/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
+parse_FieldSpec(Tokens0, Acc) ->
+ Fl = case Tokens0 of
+ [{valuefieldreference,_,_}|_] ->
+ %% 1) &field Type
+ %% &object CLASS-NAME
+ %% 2) &field &FieldName
+ %% A fixed type field cannot be distinguished from
+ %% an object field without type information.
+ [fun parse_FixedTypeValueFieldSpec/1,
+ fun parse_VariableTypeValueFieldSpec/1];
+ [{typefieldreference,_,_}|_] ->
+ %% 1) &Set Type
+ %% &ObjectSet CLASS-NAME
+ %% 2) &Set &FieldName
+ %% 3) &Type
+ %% A value set and an object set cannot be distinguished
+ %% without type information.
+ [fun parse_FixedTypeValueSetFieldSpec/1,
+ fun parse_VariableTypeValueSetFieldSpec/1,
+ fun parse_TypeFieldSpec/1];
+ [_|_] ->
+ parse_error(Tokens0)
+ end,
+ case parse_or(Tokens0, Fl) of
{Type,[{'}',_}|Rest]} ->
- {lists:reverse([Type|Acc]),Rest};
+ {lists:reverse(Acc, [Type]),Rest};
{Type,[{',',_}|Rest2]} ->
- parse_FieldSpec(Rest2,[Type|Acc]);
- {_,[H|_T]} ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'}']}})
+ parse_FieldSpec(Rest2, [Type|Acc])
end.
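+%% A worked example (illustrative; token positions written as 1): for a
+%% class body such as "CLASS { &id INTEGER UNIQUE, &Type }" the tokens
+%% after "CLASS {" are handled as
+%%   parse_FieldSpec([{valuefieldreference,1,id},{'INTEGER',1},{'UNIQUE',1},
+%%                    {',',1},{typefieldreference,1,'Type'},{'}',1}])
+%% which returns
+%%   {[{fixedtypevaluefield,id,#type{def='INTEGER'},'UNIQUE','MANDATORY'},
+%%     {typefield,'Type','MANDATORY'}],[]}
+%% (record fields left at their defaults omitted).
+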
parse_PrimitiveFieldName([{typefieldreference,_,FieldName}|Rest]) ->
@@ -1109,27 +919,19 @@ parse_PrimitiveFieldName([{typefieldreference,_,FieldName}|Rest]) ->
parse_PrimitiveFieldName([{valuefieldreference,_,FieldName}|Rest]) ->
{{valuefieldreference,FieldName},Rest};
parse_PrimitiveFieldName(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- [typefieldreference,valuefieldreference]]}}).
+ parse_error(Tokens).
parse_FieldName(Tokens) ->
{Field,Rest} = parse_PrimitiveFieldName(Tokens),
parse_FieldName(Rest,[Field]).
-parse_FieldName([{'.',_}|Rest],Acc) ->
- case (catch parse_PrimitiveFieldName(Rest)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- {FieldName,Rest2} ->
- parse_FieldName(Rest2,[FieldName|Acc])
- end;
-parse_FieldName(Tokens,Acc) ->
+parse_FieldName([{'.',_}|Rest0],Acc) ->
+ {FieldName,Rest1} = parse_PrimitiveFieldName(Rest0),
+ parse_FieldName(Rest1, [FieldName|Acc]);
+parse_FieldName(Tokens, Acc) ->
{lists:reverse(Acc),Tokens}.
-parse_FixedTypeValueFieldSpec([{valuefieldreference,L1,VFieldName}|Rest]) ->
+parse_FixedTypeValueFieldSpec([{valuefieldreference,_,VFieldName}|Rest]) ->
{Type,Rest2} = parse_Type(Rest),
{Unique,Rest3} =
case Rest2 of
@@ -1139,109 +941,61 @@ parse_FixedTypeValueFieldSpec([{valuefieldreference,L1,VFieldName}|Rest]) ->
{undefined,Rest2}
end,
{OptionalitySpec,Rest5} = parse_ValueOptionalitySpec(Rest3),
- case {Unique,Rest5} of
- {'UNIQUE',[{Del,_}|_]} when Del =:= ','; Del =:= '}' ->
- case OptionalitySpec of
- {'DEFAULT',_} ->
- throw({asn1_error,
- {L1,get(asn1_module),
- ['UNIQUE and DEFAULT in same field',VFieldName]}});
- _ ->
- {{fixedtypevaluefield,VFieldName,Type,Unique,OptionalitySpec},Rest5}
- end;
- {_,[{Del,_}|_]} when Del =:= ','; Del =:= '}' ->
- {{object_or_fixedtypevalue_field,VFieldName,Type,Unique,OptionalitySpec},Rest5};
- _ ->
- throw({asn1_error,{L1,get(asn1_module),
- [got,get_token(hd(Rest5)),expected,[',','}']]}})
- end;
-parse_FixedTypeValueFieldSpec(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,valuefieldreference]}}).
+ case is_end_delimiter(Rest5) of
+ false -> parse_error(Rest5);
+ true -> ok
+ end,
+ Tag = case Unique of
+ 'UNIQUE' -> fixedtypevaluefield;
+ _ -> object_or_fixedtypevalue_field
+ end,
+ {{Tag,VFieldName,Type,Unique,OptionalitySpec},Rest5}.
+
+parse_VariableTypeValueFieldSpec([{valuefieldreference,_,VFieldName}|Rest0]) ->
+ {FieldRef,Rest1} = parse_FieldName(Rest0),
+ {OptionalitySpec,Rest} = parse_ValueOptionalitySpec(Rest1),
+ case is_end_delimiter(Rest) of
+ true ->
+ {{variabletypevaluefield,VFieldName,FieldRef,OptionalitySpec},
+ Rest};
+ false ->
+ parse_error(Rest)
+ end.
-parse_VariableTypeValueFieldSpec([{valuefieldreference,L,VFieldName}|Rest]) ->
- {FieldRef,Rest2} = parse_FieldName(Rest),
- {OptionalitySpec,Rest3} = parse_ValueOptionalitySpec(Rest2),
- case Rest3 of
- [{Del,_}|_] when Del =:= ','; Del =:= '}' ->
- {{variabletypevaluefield,VFieldName,FieldRef,OptionalitySpec},Rest3};
- _ ->
- throw({asn1_error,{L,get(asn1_module),
- [got,get_token(hd(Rest3)),expected,[',','}']]}})
- end;
-parse_VariableTypeValueFieldSpec(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,valuefieldreference]}}).
+parse_TypeFieldSpec([{typefieldreference,_,Name}|Rest0]) ->
+ {OptionalitySpec,Rest} = parse_TypeOptionalitySpec(Rest0),
+ case is_end_delimiter(Rest) of
+ true ->
+ {{typefield,Name,OptionalitySpec},Rest};
+ false ->
+ parse_error(Rest)
+ end.
-parse_ObjectFieldSpec([{valuefieldreference,L,VFieldName}|Rest]) ->
- {Class,Rest2} = parse_DefinedObjectClass(Rest),
- {OptionalitySpec,Rest3} = parse_ObjectOptionalitySpec(Rest2),
- case Rest3 of
- [{Del,_}|_] when Del =:= ','; Del =:= '}' ->
- {{objectfield,VFieldName,Class,undefined,OptionalitySpec},Rest3};
- _ ->
- throw({asn1_error,{L,get(asn1_module),
- [got,get_token(hd(Rest3)),expected,[',','}']]}})
- end;
-parse_ObjectFieldSpec(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,valuefieldreference]}}).
+parse_FixedTypeValueSetFieldSpec([{typefieldreference,_,Name}|Rest0]) ->
+ {Type,Rest1} = parse_Type(Rest0),
+ {OptionalitySpec,Rest} = parse_ValueSetOptionalitySpec(Rest1),
+ case is_end_delimiter(Rest) of
+ true ->
+ {{objectset_or_fixedtypevalueset_field,Name,Type,
+ OptionalitySpec},Rest};
+ false ->
+ parse_error(Rest)
+ end.
-parse_TypeFieldSpec([{typefieldreference,L,TFieldName}|Rest]) ->
- {OptionalitySpec,Rest2} = parse_TypeOptionalitySpec(Rest),
- case Rest2 of
- [{Del,_}|_] when Del =:= ','; Del =:= '}' ->
- {{typefield,TFieldName,OptionalitySpec},Rest2};
- _ ->
- throw({asn1_error,{L,get(asn1_module),
- [got,get_token(hd(Rest2)),expected,[',','}']]}})
- end;
-parse_TypeFieldSpec(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,typefieldreference]}}).
+parse_VariableTypeValueSetFieldSpec([{typefieldreference,_,Name}|Rest0]) ->
+ {FieldRef,Rest1} = parse_FieldName(Rest0),
+ {OptionalitySpec,Rest} = parse_ValueSetOptionalitySpec(Rest1),
+ case is_end_delimiter(Rest) of
+ true ->
+ {{variabletypevaluesetfield,Name,FieldRef,OptionalitySpec},
+ Rest};
+ false ->
+ parse_error(Rest)
+ end.
-parse_FixedTypeValueSetFieldSpec([{typefieldreference,L,TFieldName}|Rest]) ->
- {Type,Rest2} = parse_Type(Rest),
- {OptionalitySpec,Rest3} = parse_ValueSetOptionalitySpec(Rest2),
- case Rest3 of
- [{Del,_}|_] when Del =:= ','; Del =:= '}' ->
- {{objectset_or_fixedtypevalueset_field,TFieldName,Type,
- OptionalitySpec},Rest3};
- _ ->
- throw({asn1_error,{L,get(asn1_module),
- [got,get_token(hd(Rest3)),expected,[',','}']]}})
- end;
-parse_FixedTypeValueSetFieldSpec(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,typefieldreference]}}).
-
-parse_VariableTypeValueSetFieldSpec([{typefieldreference,L,TFieldName}|Rest]) ->
- {FieldRef,Rest2} = parse_FieldName(Rest),
- {OptionalitySpec,Rest3} = parse_ValueSetOptionalitySpec(Rest2),
- case Rest3 of
- [{Del,_}|_] when Del =:= ','; Del =:= '}' ->
- {{variabletypevaluesetfield,TFieldName,FieldRef,OptionalitySpec},Rest3};
- _ ->
- throw({asn1_error,{L,get(asn1_module),
- [got,get_token(hd(Rest3)),expected,[',','}']]}})
- end;
-parse_VariableTypeValueSetFieldSpec(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,typefieldreference]}}).
-
-parse_ObjectSetFieldSpec([{typefieldreference,L,TFieldName}|Rest]) ->
- {Class,Rest2} = parse_DefinedObjectClass(Rest),
- {OptionalitySpec,Rest3} = parse_ObjectSetOptionalitySpec(Rest2),
- case Rest3 of
- [{Del,_}|_] when Del =:= ','; Del =:= '}' ->
- {{objectsetfield,TFieldName,Class,OptionalitySpec},Rest3};
- _ ->
- throw({asn1_error,{L,get(asn1_module),
- [got,get_token(hd(Rest3)),expected,[',','}']]}})
- end;
-parse_ObjectSetFieldSpec(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,typefieldreference]}}).
+is_end_delimiter([{',',_}|_]) -> true;
+is_end_delimiter([{'}',_}|_]) -> true;
+is_end_delimiter([_|_]) -> false.
parse_ValueOptionalitySpec(Tokens)->
case Tokens of
@@ -1252,15 +1006,6 @@ parse_ValueOptionalitySpec(Tokens)->
_ -> {'MANDATORY',Tokens}
end.
-parse_ObjectOptionalitySpec(Tokens) ->
- case Tokens of
- [{'OPTIONAL',_}|Rest] -> {'OPTIONAL',Rest};
- [{'DEFAULT',_}|Rest] ->
- {Object,Rest2} = parse_Object(Rest),
- {{'DEFAULT',Object},Rest2};
- _ -> {'MANDATORY',Tokens}
- end.
-
parse_TypeOptionalitySpec(Tokens) ->
case Tokens of
[{'OPTIONAL',_}|Rest] -> {'OPTIONAL',Rest};
@@ -1279,65 +1024,44 @@ parse_ValueSetOptionalitySpec(Tokens) ->
_ -> {'MANDATORY',Tokens}
end.
-parse_ObjectSetOptionalitySpec(Tokens) ->
- case Tokens of
- [{'OPTIONAL',_}|Rest] -> {'OPTIONAL',Rest};
- [{'DEFAULT',_}|Rest] ->
- {ObjectSet,Rest2} = parse_ObjectSet(Rest),
- {{'DEFAULT',ObjectSet},Rest2};
- _ -> {'MANDATORY',Tokens}
- end.
-
parse_WithSyntaxSpec([{'WITH',_},{'SYNTAX',_}|Rest]) ->
{SyntaxList,Rest2} = parse_SyntaxList(Rest),
{{'WITH SYNTAX',SyntaxList},Rest2};
parse_WithSyntaxSpec(Tokens) ->
{[],Tokens}.
-parse_SyntaxList([{'{',_},{'}',_}|Rest]) ->
- {[],Rest};
parse_SyntaxList([{'{',_}|Rest]) ->
parse_SyntaxList(Rest,[]);
parse_SyntaxList(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,['{}','{']]}}).
+ parse_error(Tokens).
-parse_SyntaxList(Tokens,Acc) ->
+parse_SyntaxList(Tokens, Acc) ->
{SyntaxList,Rest} = parse_TokenOrGroupSpec(Tokens),
case Rest of
[{'}',_}|Rest2] ->
- {lists:reverse([SyntaxList|Acc]),Rest2};
+ {lists:reverse(Acc, [SyntaxList]),Rest2};
_ ->
- parse_SyntaxList(Rest,[SyntaxList|Acc])
+ parse_SyntaxList(Rest, [SyntaxList|Acc])
end.
parse_TokenOrGroupSpec(Tokens) ->
Flist = [fun parse_RequiredToken/1,
fun parse_OptionalGroup/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
+ parse_or(Tokens, Flist).
-parse_RequiredToken([{typereference,L1,WordName}|Rest]) ->
+parse_RequiredToken([{typereference,_,WordName}|Rest]=Tokens) ->
case is_word(WordName) of
false ->
- throw({asn1_error,{L1,get(asn1_module),
- [got,WordName,expected,a,'Word']}});
+ parse_error(Tokens);
true ->
{WordName,Rest}
end;
parse_RequiredToken([{',',L1}|Rest]) ->
{{',',L1},Rest};
-parse_RequiredToken([{WordName,L1}|Rest]) ->
+parse_RequiredToken([{WordName,_}|Rest]=Tokens) ->
case is_word(WordName) of
false ->
- throw({asn1_error,{L1,get(asn1_module),
- [got,WordName,expected,a,'Word']}});
+ parse_error(Tokens);
true ->
{WordName,Rest}
end;
@@ -1347,7 +1071,9 @@ parse_RequiredToken(Tokens) ->
parse_OptionalGroup([{'[',_}|Rest]) ->
{Spec,Rest2} = parse_TokenOrGroupSpec(Rest),
{SpecList,Rest3} = parse_OptionalGroup(Rest2,[Spec]),
- {SpecList,Rest3}.
+ {SpecList,Rest3};
+parse_OptionalGroup(Tokens) ->
+ parse_error(Tokens).
parse_OptionalGroup([{']',_}|Rest],Acc) ->
{lists:reverse(Acc),Rest};
@@ -1355,82 +1081,55 @@ parse_OptionalGroup(Tokens,Acc) ->
{Spec,Rest} = parse_TokenOrGroupSpec(Tokens),
parse_OptionalGroup(Rest,[Spec|Acc]).
-parse_DefinedObject([Id={identifier,_,_ObjName}|Rest]) ->
+parse_DefinedObject([#identifier{}=Id|Rest]) ->
{{object,identifier2Extvalueref(Id)},Rest};
-parse_DefinedObject([{typereference,L1,ModName},{'.',_},{identifier,_,ObjName}|Rest]) ->
+parse_DefinedObject([{typereference,L1,ModName},{'.',_},#identifier{val=ObjName}|Rest]) ->
{{object, #'Externaltypereference'{pos=L1,module=ModName,type=ObjName}},Rest};
parse_DefinedObject(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- [identifier,'typereference.identifier']]}}).
+ parse_error(Tokens).
-parse_ObjectAssignment([{identifier,L1,ObjName}|Rest]) ->
+parse_ObjectAssignment([#identifier{pos=L1,val=ObjName}|Rest]) ->
{Class,Rest2} = parse_DefinedObjectClass(Rest),
case Rest2 of
[{'::=',_}|Rest3] ->
{Object,Rest4} = parse_Object(Rest3),
{#typedef{pos=L1,name=ObjName,
typespec=#'Object'{classname=Class,def=Object}},Rest4};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'::=']}});
- Other ->
- throw({asn1_error,{L1,get(asn1_module),
- [got,Other,expected,'::=']}})
- end;
-parse_ObjectAssignment(Tokens) ->
- throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,identifier]}}).
-
+ _ ->
+ parse_error(Rest2)
+ end.
%% parse_Object(Tokens) -> Ret
%% Tokens = [Tok]
%% Tok = tuple()
%% Ret = {object,_} | {object, _, _}
parse_Object(Tokens) ->
- Flist=[fun parse_ObjectDefn/1,
- fun parse_ObjectFromObject/1,
- fun parse_ParameterizedObject/1,
- fun parse_DefinedObject/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
+ %% The ObjectFromObject production is not included here,
+ %% since it will have been caught by the ValueFromObject
+ %% before we reach this point.
+ Flist = [fun parse_ObjectDefn/1,
+ fun parse_DefinedObject/1],
+ parse_or(Tokens, Flist).
parse_ObjectDefn(Tokens) ->
Flist=[fun parse_DefaultSyntax/1,
fun parse_DefinedSyntax/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
+ parse_or(Tokens, Flist).
-parse_DefaultSyntax([{'{',_},{'}',_}|Rest]) ->
- {{object,defaultsyntax,[]},Rest};
parse_DefaultSyntax([{'{',_}|Rest]) ->
parse_DefaultSyntax(Rest,[]);
parse_DefaultSyntax(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,['{}','{']]}}).
+ parse_error(Tokens).
-parse_DefaultSyntax(Tokens,Acc) ->
+parse_DefaultSyntax(Tokens, Acc) ->
{Setting,Rest} = parse_FieldSetting(Tokens),
case Rest of
[{',',_}|Rest2] ->
parse_DefaultSyntax(Rest2,[Setting|Acc]);
[{'}',_}|Rest3] ->
- {{object,defaultsyntax,lists:reverse([Setting|Acc])},Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,[',','}']]}})
+ {{object,defaultsyntax,lists:reverse(Acc, [Setting])},Rest3};
+ _ ->
+ parse_error(Rest)
end.
parse_FieldSetting(Tokens) ->
@@ -1439,7 +1138,9 @@ parse_FieldSetting(Tokens) ->
{{PrimFieldName,Setting},Rest2}.
parse_DefinedSyntax([{'{',_}|Rest]) ->
- parse_DefinedSyntax(Rest,[]).
+ parse_DefinedSyntax(Rest, []);
+parse_DefinedSyntax(Tokens) ->
+ parse_error(Tokens).
parse_DefinedSyntax(Tokens,Acc) ->
case Tokens of
@@ -1455,95 +1156,70 @@ parse_DefinedSyntax(Tokens,Acc) ->
%% Literal ::= word | ','
%% Setting ::= Type | Value | ValueSet | Object | ObjectSet
%% word equals typereference, but with no lower-case letters
-parse_DefinedSyntaxToken([{',',L1}|Rest]) ->
- {{',',L1},Rest};
+parse_DefinedSyntaxToken([{',',_}=Comma|Rest]) ->
+ {Comma,Rest};
%% ObjectClassFieldType or a defined type with a constraint.
%% Should also be able to parse a parameterized type. It may be
%% impossible to distinguish between a parameterized type and a Literal
%% followed by an object set.
-parse_DefinedSyntaxToken(Tokens=[{typereference,L1,_Name},{T,_}|_Rest])
- when T == '.'; T == '(' ->
- case catch parse_Setting(Tokens) of
- {asn1_error,_} ->
- throw({asn1_error,{L1,get(asn1_module),
- [got,hd(Tokens), expected,['Word',setting]]}});
- {'EXIT',Reason} ->
- exit(Reason);
- Result ->
- Result
- end;
-parse_DefinedSyntaxToken(Tokens=[TRef={typereference,L1,Name}|Rest]) ->
+parse_DefinedSyntaxToken([{typereference,_,_Name},{T,_}|_]=Tokens)
+ when T =:= '.'; T =:= '(' ->
+ parse_Setting(Tokens);
+parse_DefinedSyntaxToken([{typereference,L1,Name}=TRef|Rest]=Tokens) ->
case is_word(Name) of
false ->
case lookahead_definedsyntax(Rest) of
word_or_setting ->
{{setting,L1,tref2Exttref(TRef)},Rest};
- _ ->
+ setting ->
parse_Setting(Tokens)
end;
true ->
- %% {{word_or_setting,L1,Name},Rest}
{{word_or_setting,L1,tref2Exttref(TRef)},Rest}
end;
parse_DefinedSyntaxToken(Tokens) ->
- case catch parse_Setting(Tokens) of
- {asn1_error,_} ->
- parse_Word(Tokens);
- {'EXIT',Reason} ->
- exit(Reason);
- Result ->
+ try parse_Setting(Tokens) of
+ {_,_}=Result ->
Result
+ catch
+ throw:{asn1_error,_} ->
+ parse_Word(Tokens)
end.
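%% As an illustration (an assumed defined syntax, not taken from the
%% test data): in "CODE 1", the typereference token CODE is a plain
%% word, so the typereference clause above returns a word_or_setting
%% tuple for it, while the number token 1 falls through to
%% parse_Setting/1 and is returned as a value setting.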
lookahead_definedsyntax([{typereference,_,Name}|_Rest]) ->
- case is_word(Name) of
+ case is_word(Name) of
true -> word_or_setting;
- _ -> setting
+ false -> setting
end;
lookahead_definedsyntax([{'}',_}|_Rest]) ->
word_or_setting;
lookahead_definedsyntax(_) ->
setting.
-parse_Word([{Name,Pos}|Rest]) ->
+parse_Word([{Name,Pos}|Rest]=Tokens) ->
case is_word(Name) of
false ->
- throw({asn1_error,{Pos,get(asn1_module),
- [got,Name, expected,a,'Word']}});
+ parse_error(Tokens);
true ->
{{word_or_setting,Pos,tref2Exttref(Pos,Name)},Rest}
- end.
+ end;
+parse_Word(Tokens) ->
+ parse_error(Tokens).
parse_Setting(Tokens) ->
Flist = [{type_tag,fun parse_Type/1},
{value_tag,fun parse_Value/1},
{object_tag,fun parse_Object/1},
{objectset_tag,fun parse_ObjectSet/1}],
- case (catch parse_or_tag(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result = {{value_tag,_},_} ->
+ case parse_or_tag(Tokens, Flist) of
+ {{value_tag,_},_}=Result ->
+ %% Keep the value_tag.
Result;
{{Tag,Setting},Rest} when is_atom(Tag) ->
+ %% Remove all other tags.
{Setting,Rest}
end.
-%% parse_Setting(Tokens) ->
-%% Flist = [fun parse_Type/1,
-%% fun parse_Value/1,
-%% fun parse_Object/1,
-%% fun parse_ObjectSet/1],
-%% case (catch parse_or(Tokens,Flist)) of
-%% {'EXIT',Reason} ->
-%% exit(Reason);
-%% AsnErr = {asn1_error,_} ->
-%% throw(AsnErr);
-%% Result ->
-%% Result
-%% end.
-
parse_DefinedObjectSet([{typereference,L1,ModuleName},{'.',_},
{typereference,L2,ObjSetName}|Rest]) ->
{{objectset,L1,#'Externaltypereference'{pos=L2,module=ModuleName,
@@ -1552,9 +1228,7 @@ parse_DefinedObjectSet([{typereference,L1,ObjSetName}|Rest]) ->
{{objectset,L1,#'Externaltypereference'{pos=L1,module=resolve_module(ObjSetName),
type=ObjSetName}},Rest};
parse_DefinedObjectSet(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- [typereference,'typereference.typereference']]}}).
+ parse_error(Tokens).
parse_ObjectSetAssignment([{typereference,L1,ObjSetName}|Rest]) ->
{Class,Rest2} = parse_DefinedObjectClass(Rest),
@@ -1564,16 +1238,9 @@ parse_ObjectSetAssignment([{typereference,L1,ObjSetName}|Rest]) ->
{#typedef{pos=L1,name=ObjSetName,
typespec=#'ObjectSet'{class=Class,
set=ObjectSet}},Rest4};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'::=']}})
-%%% Other ->
-%%% throw(Other)
- end;
-parse_ObjectSetAssignment(Tokens) ->
- throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- typereference]}}).
+ _ ->
+ parse_error(Rest2)
+ end.
%% parse_ObjectSet(Tokens) -> {Ret,Rest}
%% Tokens = [Tok]
@@ -1590,26 +1257,20 @@ parse_ObjectSet([{'{',_}|Rest]) ->
case Rest2 of
[{'}',_}|Rest3] ->
{ObjSetSpec,Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'}']}})
+ _ ->
+ parse_error(Rest2)
end;
parse_ObjectSet(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,'{']}}).
+ parse_error(Tokens).
-parse_ObjectSetSpec([{'...',_}|Rest]) ->
- case Rest of
- [{',',_}|Rest2] ->
- {Elements,Rest3}=parse_ElementSetSpecs(Rest2),
- {{[],Elements},Rest3};
- _ ->
- {['EXTENSIONMARK'],Rest}
- end;
+parse_ObjectSetSpec([{'...',_},{',',_}|Tokens0]) ->
+ {Elements,Tokens} = parse_ElementSetSpec(Tokens0),
+ {{element_set,empty,Elements},Tokens};
+parse_ObjectSetSpec([{'...',_}|Tokens]) ->
+ {{element_set,empty,empty},Tokens};
parse_ObjectSetSpec(Tokens) ->
parse_ElementSetSpecs(Tokens).
-% moved fun parse_Object/1 and fun parse_DefinedObjectSet/1 to parse_Elements
%% parse_ObjectSetElements(Tokens) -> {Result,Rest}
%% Result ::= {'ObjectSetFromObjects',Objects,Name} | {pos,ObjectSet,Params}
%% Objects ::= ReferencedObjects
@@ -1619,18 +1280,9 @@ parse_ObjectSetSpec(Tokens) ->
%% ObjectSet ::= {objectset,integer(),#'Externaltypereference'{}}
%% Params ::= list() (see parse_ActualParameterList/1)
parse_ObjectSetElements(Tokens) ->
- Flist = [%fun parse_Object/1,
- %fun parse_DefinedObjectSet/1,
- fun parse_ObjectSetFromObjects/1,
+ Flist = [fun parse_ObjectSetFromObjects/1,
fun parse_ParameterizedObjectSet/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
+ parse_or(Tokens, Flist).
parse_ObjectClassFieldType(Tokens) ->
{Class,Rest} = parse_DefinedObjectClass(Tokens),
@@ -1641,25 +1293,10 @@ parse_ObjectClassFieldType(Tokens) ->
classname=Class,
class=Class,fieldname=FieldName},
{#type{def=OCFT},Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'.']}})
-%%% Other ->
-%%% throw(Other)
+ _ ->
+ parse_error(Rest)
end.
-%parse_ObjectClassFieldValue(Tokens) ->
-% Flist = [fun parse_OpenTypeFieldVal/1,
-% fun parse_FixedTypeFieldVal/1],
-% case (catch parse_or(Tokens,Flist)) of
-% {'EXIT',Reason} ->
-% throw(Reason);
-% AsnErr = {asn1_error,_} ->
-% throw(AsnErr);
-% Result ->
-% Result
-% end.
-
parse_ObjectClassFieldValue(Tokens) ->
parse_OpenTypeFieldVal(Tokens).
@@ -1669,28 +1306,10 @@ parse_OpenTypeFieldVal(Tokens) ->
[{':',_}|Rest2] ->
{Value,Rest3} = parse_Value(Rest2),
{{opentypefieldvalue,Type,Value},Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,':']}})
+ _ ->
+ parse_error(Rest)
end.
-% parse_FixedTypeFieldVal(Tokens) ->
-% parse_Value(Tokens).
-
-% parse_InformationFromObjects(Tokens) ->
-% Flist = [fun parse_ValueFromObject/1,
-% fun parse_ValueSetFromObjects/1,
-% fun parse_TypeFromObject/1,
-% fun parse_ObjectFromObject/1],
-% case (catch parse_or(Tokens,Flist)) of
-% {'EXIT',Reason} ->
-% throw(Reason);
-% AsnErr = {asn1_error,_} ->
-% throw(AsnErr);
-% Result ->
-% Result
-% end.
-
%% parse_ReferencedObjects(Tokens) -> {Result,Rest}
%% Result ::= DefObject | DefObjSet |
%% {po,DefObject,Params} | {pos,DefObjSet,Params} |
@@ -1702,18 +1321,11 @@ parse_OpenTypeFieldVal(Tokens) ->
parse_ReferencedObjects(Tokens) ->
Flist = [fun parse_DefinedObject/1,
fun parse_DefinedObjectSet/1,
- fun parse_ParameterizedObject/1,
fun parse_ParameterizedObjectSet/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
+ parse_or(Tokens, Flist).
parse_ValueFromObject(Tokens) ->
+ %% This production also matches ObjectFromObject.
{Objects,Rest} = parse_ReferencedObjects(Tokens),
case Rest of
[{'.',_}|Rest2] ->
@@ -1722,35 +1334,10 @@ parse_ValueFromObject(Tokens) ->
{valuefieldreference,_} ->
{{'ValueFromObject',Objects,Name},Rest3};
_ ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,typefieldreference,expected,
- valuefieldreference]}})
+ parse_error(Rest2)
end;
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'.']}})
-%%% Other ->
-%%% throw({asn1_error,{got,Other,expected,'.'}})
- end.
-
-parse_ValueSetFromObjects(Tokens) ->
- {Objects,Rest} = parse_ReferencedObjects(Tokens),
- case Rest of
- [{'.',_}|Rest2] ->
- {Name,Rest3} = parse_FieldName(Rest2),
- case lists:last(Name) of
- {typefieldreference,_FieldName} ->
- {{'ValueSetFromObjects',Objects,Name},Rest3};
- _ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,
- typefieldreference]}})
- end;
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'.']}})
-%%% Other ->
-%%% throw({asn1_error,{got,Other,expected,'.'}})
+ _ ->
+ parse_error(Rest)
end.
parse_TypeFromObject(Tokens) ->
@@ -1760,28 +1347,12 @@ parse_TypeFromObject(Tokens) ->
{Name,Rest3} = parse_FieldName(Rest2),
case lists:last(Name) of
{typefieldreference,_FieldName} ->
- {{'TypeFromObject',Objects,Name},Rest3};
+ {#type{def={'TypeFromObject',Objects,Name}},Rest3};
_ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,
- typefieldreference]}})
+ parse_error(Rest2)
end;
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'.']}})
-%%% Other ->
-%%% throw({asn1_error,{got,Other,expected,'.'}})
- end.
-
-parse_ObjectFromObject(Tokens) ->
- {Objects,Rest} = parse_ReferencedObjects(Tokens),
- case Rest of
- [{'.',_}|Rest2] ->
- {Name,Rest3} = parse_FieldName(Rest2),
- {{'ObjectFromObject',Objects,Name},Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'.']}})
+ _ ->
+ parse_error(Rest)
end.
%% parse_ObjectSetFromObjects(Tokens) -> {Result,Rest}
@@ -1799,23 +1370,12 @@ parse_ObjectSetFromObjects(Tokens) ->
{typefieldreference,_FieldName} ->
{{'ObjectSetFromObjects',Objects,Name},Rest3};
_ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,
- typefieldreference]}})
+ parse_error(Rest2)
end;
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'.']}})
+ _ ->
+ parse_error(Rest)
end.
-% parse_InstanceOfType([{'INSTANCE',_},{'OF',_}|Rest]) ->
-% {Class,Rest2} = parse_DefinedObjectClass(Rest),
-% {{'InstanceOfType',Class},Rest2}.
-
-% parse_InstanceOfValue(Tokens) ->
-% parse_Value(Tokens).
-
-
%% X.682 constraint specification
@@ -1823,14 +1383,7 @@ parse_GeneralConstraint(Tokens) ->
Flist = [fun parse_UserDefinedConstraint/1,
fun parse_TableConstraint/1,
fun parse_ContentsConstraint/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
+ parse_or(Tokens, Flist).
parse_UserDefinedConstraint([{'CONSTRAINED',_},{'BY',_},{'{',_},{'}',_}|Rest])->
{{constrained_by,[]},Rest};
@@ -1841,32 +1394,23 @@ parse_UserDefinedConstraint([{'CONSTRAINED',_},
case Rest2 of
[{'}',_}|Rest3] ->
{{constrained_by,Param},Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'}']}})
+ _ ->
+ parse_error(Rest2)
end;
parse_UserDefinedConstraint(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- ['CONSTRAINED BY {}','CONSTRAINED BY {']]}}).
+ parse_error(Tokens).
parse_UserDefinedConstraintParameter(Tokens) ->
- parse_UserDefinedConstraintParameter(Tokens,[]).
-parse_UserDefinedConstraintParameter(Tokens,Acc) ->
+ parse_UserDefinedConstraintParameter(Tokens, []).
+
+parse_UserDefinedConstraintParameter(Tokens0, Acc) ->
Flist = [fun parse_GovernorAndActualParameter/1,
fun parse_ActualParameter/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- {Result,Rest} ->
- case Rest of
- [{',',_}|_Rest2] ->
- parse_UserDefinedConstraintParameter(Tokens,[Result|Acc]);
- _ ->
- {lists:reverse([Result|Acc]),Rest}
- end
+ case parse_or(Tokens0, Flist) of
+ {Result,[{',',_}|Tokens]} ->
+ parse_UserDefinedConstraintParameter(Tokens, [Result|Acc]);
+ {Result,Tokens} ->
+ {lists:reverse(Acc, [Result]),Tokens}
end.
parse_GovernorAndActualParameter(Tokens) ->
@@ -1875,26 +1419,18 @@ parse_GovernorAndActualParameter(Tokens) ->
[{':',_}|Rest2] ->
{Params,Rest3} = parse_ActualParameter(Rest2),
{{'Governor_Params',Governor,Params},Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,':']}})
+ _ ->
+ parse_error(Rest)
end.
parse_TableConstraint(Tokens) ->
Flist = [fun parse_ComponentRelationConstraint/1,
fun parse_SimpleTableConstraint/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
+ parse_or(Tokens, Flist).
parse_SimpleTableConstraint(Tokens) ->
{ObjectSet,Rest} = parse_ObjectSet(Tokens),
- {{simpletable,ObjectSet},Rest}.
+ {{element_set,{simpletable,ObjectSet},none},Rest}.
parse_ComponentRelationConstraint([{'{',_}|Rest]) ->
{ObjectSet,Rest2} = parse_DefinedObjectSet(Rest),
@@ -1903,21 +1439,18 @@ parse_ComponentRelationConstraint([{'{',_}|Rest]) ->
{AtNot,Rest4} = parse_AtNotationList(Rest3,[]),
case Rest4 of
[{'}',_}|Rest5] ->
- {{componentrelation,ObjectSet,AtNot},Rest5};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'}']}})
+ Ret = {element_set,
+ {componentrelation,ObjectSet,AtNot},
+ none},
+ {Ret,Rest5};
+ _ ->
+ parse_error(Rest4)
end;
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,
- 'ComponentRelationConstraint',ended,with,'}']}})
-%%% Other ->
-%%% throw(Other)
+ _ ->
+ parse_error(Rest2)
end;
parse_ComponentRelationConstraint(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,'{']}}).
+ parse_error(Tokens).
parse_AtNotationList(Tokens,Acc) ->
{AtNot,Rest} = parse_AtNotation(Tokens),
@@ -1925,7 +1458,7 @@ parse_AtNotationList(Tokens,Acc) ->
[{',',_}|Rest2] ->
parse_AtNotationList(Rest2,[AtNot|Acc]);
_ ->
- {lists:reverse([AtNot|Acc]),Rest}
+ {lists:reverse(Acc, [AtNot]),Rest}
end.
parse_AtNotation([{'@',_},{'.',_}|Rest]) ->
@@ -1935,20 +1468,17 @@ parse_AtNotation([{'@',_}|Rest]) ->
{CIdList,Rest2} = parse_ComponentIdList(Rest),
{{outermost,CIdList},Rest2};
parse_AtNotation(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,['@','@.']]}}).
+ parse_error(Tokens).
parse_ComponentIdList(Tokens) ->
parse_ComponentIdList(Tokens,[]).
-parse_ComponentIdList([Id = {identifier,_,_},{'.',_}|Rest],Acc) ->
+parse_ComponentIdList([#identifier{}=Id,{'.',_}|Rest], Acc) ->
parse_ComponentIdList(Rest,[identifier2Extvalueref(Id)|Acc]);
-parse_ComponentIdList([Id = {identifier,_,_}|Rest],Acc) ->
- {lists:reverse([identifier2Extvalueref(Id)|Acc]),Rest};
+parse_ComponentIdList([#identifier{}=Id|Rest], Acc) ->
+ {lists:reverse(Acc, [identifier2Extvalueref(Id)]),Rest};
parse_ComponentIdList(Tokens,_) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- [identifier,'identifier.']]}}).
+ parse_error(Tokens).
parse_ContentsConstraint([{'CONTAINING',_}|Rest]) ->
{Type,Rest2} = parse_Type(Rest),
@@ -1963,24 +1493,14 @@ parse_ContentsConstraint([{'ENCODED',_},{'BY',_}|Rest]) ->
{Value,Rest2} = parse_Value(Rest),
{{contentsconstraint,[],Value},Rest2};
parse_ContentsConstraint(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- 'CONTAINING','or','ENCODED BY']}}).
-
+ parse_error(Tokens).
% X.683 Parameterization of ASN.1 specifications
parse_Governor(Tokens) ->
Flist = [fun parse_Type/1,
fun parse_DefinedObjectClass/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
+ parse_or(Tokens, Flist).
parse_ActualParameter(Tokens) ->
Flist = [fun parse_Type/1,
@@ -1989,32 +1509,7 @@ parse_ActualParameter(Tokens) ->
fun parse_DefinedObjectClass/1,
fun parse_Object/1,
fun parse_ObjectSet/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
-
-parse_ParameterizedAssignment(Tokens) ->
- Flist = [fun parse_ParameterizedTypeAssignment/1,
- fun parse_ParameterizedValueAssignment/1,
- fun parse_ParameterizedValueSetTypeAssignment/1,
- fun parse_ParameterizedObjectClassAssignment/1,
- fun parse_ParameterizedObjectAssignment/1,
- fun parse_ParameterizedObjectSetAssignment/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- AsnAssErr = {asn1_assignment_error,_} ->
- throw(AsnAssErr);
- Result ->
- Result
- end.
+ parse_or(Tokens, Flist).
%% parse_ParameterizedTypeAssignment(Tokens) -> Result
%% Result = {#ptypedef{},Rest} | throw()
@@ -2025,18 +1520,13 @@ parse_ParameterizedTypeAssignment([{typereference,L1,Name}|Rest]) ->
{Type,Rest4} = parse_Type(Rest3),
{#ptypedef{pos=L1,name=Name,args=ParameterList,typespec=Type},
Rest4};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'::=']}})
- end;
-parse_ParameterizedTypeAssignment(Tokens) ->
- throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- typereference]}}).
+ _ ->
+ parse_error(Rest2)
+ end.
%% parse_ParameterizedValueAssignment(Tokens) -> Result
%% Result = {#pvaluedef{},Rest} | throw()
-parse_ParameterizedValueAssignment([{identifier,L1,Name}|Rest]) ->
+parse_ParameterizedValueAssignment([#identifier{pos=L1,val=Name}|Rest]) ->
{ParameterList,Rest2} = parse_ParameterList(Rest),
{Type,Rest3} = parse_Type(Rest2),
case Rest3 of
@@ -2044,13 +1534,9 @@ parse_ParameterizedValueAssignment([{identifier,L1,Name}|Rest]) ->
{Value,Rest5} = parse_Value(Rest4),
{#pvaluedef{pos=L1,name=Name,args=ParameterList,type=Type,
value=Value},Rest5};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'::=']}})
- end;
-parse_ParameterizedValueAssignment(Tokens) ->
- throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,identifier]}}).
+ _ ->
+ parse_error(Rest3)
+ end.
%% parse_ParameterizedValueSetTypeAssignment(Tokens) -> Result
%% Result = {#pvaluesetdef{},Rest} | throw()
@@ -2062,14 +1548,9 @@ parse_ParameterizedValueSetTypeAssignment([{typereference,L1,Name}|Rest]) ->
{ValueSet,Rest5} = parse_ValueSet(Rest4),
{#pvaluesetdef{pos=L1,name=Name,args=ParameterList,
type=Type,valueset=ValueSet},Rest5};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'::=']}})
- end;
-parse_ParameterizedValueSetTypeAssignment(Tokens) ->
- throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- typereference]}}).
+ _ ->
+ parse_error(Rest3)
+ end.
%% parse_ParameterizedObjectClassAssignment(Tokens) -> Result
%% Result = {#ptypedef{},Rest} | throw()
@@ -2080,18 +1561,13 @@ parse_ParameterizedObjectClassAssignment([{typereference,L1,Name}|Rest]) ->
{Class,Rest4} = parse_ObjectClass(Rest3),
{#ptypedef{pos=L1,name=Name,args=ParameterList,typespec=Class},
Rest4};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'::=']}})
- end;
-parse_ParameterizedObjectClassAssignment(Tokens) ->
- throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- typereference]}}).
+ _ ->
+ parse_error(Rest2)
+ end.
%% parse_ParameterizedObjectAssignment(Tokens) -> Result
%% Result = {#pobjectdef{},Rest} | throw()
-parse_ParameterizedObjectAssignment([{identifier,L1,Name}|Rest]) ->
+parse_ParameterizedObjectAssignment([#identifier{pos=L1,val=Name}|Rest]) ->
{ParameterList,Rest2} = parse_ParameterList(Rest),
{Class,Rest3} = parse_DefinedObjectClass(Rest2),
case Rest3 of
@@ -2099,36 +1575,9 @@ parse_ParameterizedObjectAssignment([{identifier,L1,Name}|Rest]) ->
{Object,Rest5} = parse_Object(Rest4),
{#pobjectdef{pos=L1,name=Name,args=ParameterList,
class=Class,def=Object},Rest5};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'::=']}})
-%%% Other ->
-%%% throw(Other)
- end;
-parse_ParameterizedObjectAssignment(Tokens) ->
- throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,identifier]}}).
-
-%% parse_ParameterizedObjectSetAssignment(Tokens) -> Result
-%% Result = {#pobjectsetdef{},Rest} | throw{}
-parse_ParameterizedObjectSetAssignment([{typereference,L1,Name}|Rest]) ->
- {ParameterList,Rest2} = parse_ParameterList(Rest),
- {Class,Rest3} = parse_DefinedObjectClass(Rest2),
- case Rest3 of
- [{'::=',_}|Rest4] ->
- {ObjectSet,Rest5} = parse_ObjectSet(Rest4),
- {#pobjectsetdef{pos=L1,name=Name,args=ParameterList,
- class=Class,def=ObjectSet},Rest5};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'::=']}})
-%%% Other ->
-%%% throw(Other)
- end;
-parse_ParameterizedObjectSetAssignment(Tokens) ->
- throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- typereference]}}).
+ _ ->
+ parse_error(Rest3)
+ end.
%% parse_ParameterList(Tokens) -> Result
%% Result = [Parameter]
@@ -2137,35 +1586,24 @@ parse_ParameterizedObjectSetAssignment(Tokens) ->
%% Type = #type{}
%% DefinedObjectClass = #'Externaltypereference'{}
%% Reference = #'Externaltypereference'{} | #'Externalvaluereference'{}
-parse_ParameterList([{'{',_}|Rest]) ->
- parse_ParameterList(Rest,[]);
-parse_ParameterList(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,'{']}}).
+parse_ParameterList([{'{',_}|Tokens]) ->
+ parse_ParameterList(Tokens, []).
parse_ParameterList(Tokens,Acc) ->
{Parameter,Rest} = parse_Parameter(Tokens),
case Rest of
[{',',_}|Rest2] ->
- parse_ParameterList(Rest2,[Parameter|Acc]);
+ parse_ParameterList(Rest2, [Parameter|Acc]);
[{'}',_}|Rest3] ->
- {lists:reverse([Parameter|Acc]),Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,[',','}']]}})
+ {lists:reverse(Acc, [Parameter]),Rest3};
+ _ ->
+ parse_error(Rest)
end.
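%% Illustrative example (assumed input, not from the test data): the
%% parameter list "{INTEGER:lb, T}" gives one parameter pairing the
%% governor INTEGER with the reference lb, followed by the bare
%% reference T.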
parse_Parameter(Tokens) ->
Flist = [fun parse_ParamGovAndRef/1,
fun parse_Reference/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
+ parse_or(Tokens, Flist).
parse_ParamGovAndRef(Tokens) ->
{ParamGov,Rest} = parse_ParamGovernor(Tokens),
@@ -2173,86 +1611,54 @@ parse_ParamGovAndRef(Tokens) ->
[{':',_}|Rest2] ->
{Ref,Rest3} = parse_Reference(Rest2),
{{ParamGov,Ref},Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,':']}})
+ _ ->
+ parse_error(Rest)
end.
parse_ParamGovernor(Tokens) ->
Flist = [fun parse_Governor/1,
fun parse_Reference/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
-
-% parse_ParameterizedReference(Tokens) ->
-% {Ref,Rest} = parse_Reference(Tokens),
-% case Rest of
-% [{'{',_},{'}',_}|Rest2] ->
-% {{ptref,Ref},Rest2};
-% _ ->
-% {{ptref,Ref},Rest}
-% end.
+ parse_or(Tokens, Flist).
parse_SimpleDefinedType([{typereference,L1,ModuleName},{'.',_},
{typereference,_,TypeName}|Rest]) ->
{#'Externaltypereference'{pos=L1,module=ModuleName,
type=TypeName},Rest};
parse_SimpleDefinedType([Tref={typereference,_,_}|Rest]) ->
-% {#'Externaltypereference'{pos=L2,module=get(asn1_module),
-% type=TypeName},Rest};
{tref2Exttref(Tref),Rest};
parse_SimpleDefinedType(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- [typereference,'typereference.typereference']]}}).
+ parse_error(Tokens).
parse_SimpleDefinedValue([{typereference,L1,ModuleName},{'.',_},
- {identifier,_,Value}|Rest]) ->
+ #identifier{val=Value}|Rest]) ->
{{simpledefinedvalue,#'Externalvaluereference'{pos=L1,module=ModuleName,
value=Value}},Rest};
-parse_SimpleDefinedValue([Id={identifier,_,_Value}|Rest]) ->
+parse_SimpleDefinedValue([#identifier{}=Id|Rest]) ->
{{simpledefinedvalue,identifier2Extvalueref(Id)},Rest};
parse_SimpleDefinedValue(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- ['typereference.identifier',identifier]]}}).
+ parse_error(Tokens).
parse_ParameterizedType(Tokens) ->
+ %% May also be a parameterized class.
{Type,Rest} = parse_SimpleDefinedType(Tokens),
{Params,Rest2} = parse_ActualParameterList(Rest),
- {{pt,Type,Params},Rest2}.
+ {#type{def={pt,Type,Params}},Rest2}.
parse_ParameterizedValue(Tokens) ->
+ %% May also be a parameterized object.
{Value,Rest} = parse_SimpleDefinedValue(Tokens),
{Params,Rest2} = parse_ActualParameterList(Rest),
{{pv,Value,Params},Rest2}.
-parse_ParameterizedObjectClass(Tokens) ->
- {Type,Rest} = parse_DefinedObjectClass(Tokens),
- {Params,Rest2} = parse_ActualParameterList(Rest),
- {{poc,Type,Params},Rest2}.
-
parse_ParameterizedObjectSet(Tokens) ->
{ObjectSet,Rest} = parse_DefinedObjectSet(Tokens),
{Params,Rest2} = parse_ActualParameterList(Rest),
{{pos,ObjectSet,Params},Rest2}.
-parse_ParameterizedObject(Tokens) ->
- {Object,Rest} = parse_DefinedObject(Tokens),
- {Params,Rest2} = parse_ActualParameterList(Rest),
- {{po,Object,Params},Rest2}.
-
parse_ActualParameterList([{'{',_}|Rest]) ->
parse_ActualParameterList(Rest,[]);
parse_ActualParameterList(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,'{']}}).
+ parse_error(Tokens).
parse_ActualParameterList(Tokens,Acc) ->
{Parameter,Rest} = parse_ActualParameter(Tokens),
@@ -2260,43 +1666,22 @@ parse_ActualParameterList(Tokens,Acc) ->
[{',',_}|Rest2] ->
parse_ActualParameterList(Rest2,[Parameter|Acc]);
[{'}',_}|Rest3] ->
- {lists:reverse([Parameter|Acc]),Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,[',','}']]}})
-%%% Other ->
-%%% throw(Other)
+ {lists:reverse(Acc, [Parameter]),Rest3};
+ _ ->
+ parse_error(Rest)
end.
-
-
-
-
-
-
-%-------------------------
-
+%% Test whether Token is allowed in a syntax list.
is_word(Token) ->
- case not_allowed_word(Token) of
+ List = atom_to_list(Token),
+ case not_allowed_word(List) of
true -> false;
- _ ->
- if
- is_atom(Token) ->
- Item = atom_to_list(Token),
- is_word(Item);
- is_list(Token), length(Token) == 1 ->
- check_one_char_word(Token);
- is_list(Token) ->
- [A|Rest] = Token,
- case check_first(A) of
- true ->
- check_rest(Rest);
- _ ->
- false
- end
- end
+ false -> is_word_1(List)
end.
+is_word_1([H|T]) ->
+ check_first(H) andalso check_rest(T).
+
not_allowed_word(Name) ->
lists:member(Name,["BIT",
"BOOLEAN",
@@ -2321,257 +1706,123 @@ not_allowed_word(Name) ->
"TRUE",
"UNION"]).
-check_one_char_word([A]) when $A =< A, $Z >= A ->
- true;
-check_one_char_word([_]) ->
- false. %% unknown item in SyntaxList
+check_first(C) ->
+ $A =< C andalso C =< $Z.
-check_first(A) when $A =< A, $Z >= A ->
- true;
-check_first(_) ->
- false. %% unknown item in SyntaxList
-
-check_rest([R,R|_Rs]) when $- == R ->
- false; %% two consecutive hyphens are not allowed in a word
-check_rest([R]) when $- == R ->
- false; %% word cannot end with hyphen
-check_rest([R|Rs]) when $A=<R, $Z>=R; $-==R ->
+check_rest([R|Rs]) when $A =< R, R =< $Z; R =:= $- ->
check_rest(Rs);
check_rest([]) ->
true;
check_rest(_) ->
false.
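%% A few illustrative calls (assumed token atoms, for example only):
%%
%%   is_word('CODE') -> true     (upper-case word, not reserved)
%%   is_word('BIT')  -> false    (reserved word)
%%   is_word(code)   -> false    (does not start with A-Z)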
+%%%
+%%% Parse alternative type lists for CHOICE.
+%%%
+
+parse_AlternativeTypeLists(Tokens0) ->
+ {Root,Tokens1} = parse_AlternativeTypeList(Tokens0),
+ case Tokens1 of
+ [{',',_}|Tokens2] ->
+ {ExtMarker,Tokens3} = parse_ExtensionAndException(Tokens2),
+ {ExtAlts,Tokens4} = parse_ExtensionAdditionAlternatives(Tokens3),
+ {_,Tokens} = parse_OptionalExtensionMarker(Tokens4, []),
+ {Root++ExtMarker++ExtAlts,Tokens};
+ Tokens ->
+ {Root,Tokens}
+ end.
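%% For example (illustrative), the alternatives in
%%
%%   CHOICE { a INTEGER, ..., b BOOLEAN }
%%
%% come back from parse_AlternativeTypeLists/1 as the root alternative
%% for a, then an #'EXTENSIONMARK'{} record, then the extension
%% addition for b.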
+
+parse_ExtensionAndException([{'...',L}|Tokens0]) ->
+ {[#'EXTENSIONMARK'{pos=L}],
+ case Tokens0 of
+ [{'!',_}|Tokens1] ->
+ {_,Tokens} = parse_ExceptionIdentification(Tokens1),
+ Tokens;
+ _ ->
+ Tokens0
+ end}.
+
+parse_AlternativeTypeList([#identifier{}|_]=Tokens0) ->
+ {AltType,Tokens} = parse_NamedType(Tokens0),
+ parse_AlternativeTypeList_1(Tokens, [AltType]);
+parse_AlternativeTypeList(Tokens) ->
+ parse_error(Tokens).
+
+parse_AlternativeTypeList_1([{',',_}|[#identifier{}|_]=Tokens0], Acc) ->
+ {AltType,Tokens} = parse_NamedType(Tokens0),
+ parse_AlternativeTypeList_1(Tokens, [AltType|Acc]);
+parse_AlternativeTypeList_1(Tokens, Acc) ->
+ {lists:reverse(Acc),Tokens}.
-to_set(V) when is_list(V) ->
- ordsets:from_list(V);
-to_set(V) ->
- ordsets:from_list([V]).
-
-parse_AlternativeTypeLists(Tokens) ->
- parse_AlternativeTypeLists(Tokens,[]).
-
-parse_AlternativeTypeLists(Tokens = [{identifier,_,_}|_Rest0],Clist) ->
- {CompList,Rest1} = parse_AlternativeTypeList(Tokens,[]),
- parse_AlternativeTypeLists(Rest1,Clist++CompList);
-parse_AlternativeTypeLists([{'...',L1},{'!',_}|Rest02],Clist0) ->
- {_,Rest03} = parse_ExceptionIdentification(Rest02),
- %% Exception info is currently thrown away
- parse_AlternativeTypeLists2(Rest03,Clist0++[#'EXTENSIONMARK'{pos=L1}]);
-parse_AlternativeTypeLists([{',',L1},{'...',_},{'!',_}|Rest02],Clist0) when Clist0 =/= []->
- {_,Rest03} = parse_ExceptionIdentification(Rest02),
- %% Exception info is currently thrown away
- parse_AlternativeTypeLists2(Rest03,Clist0++[#'EXTENSIONMARK'{pos=L1}]);
-
-parse_AlternativeTypeLists([{',',_},{'...',L1}|Rest02],Clist0) when Clist0 =/= []->
- parse_AlternativeTypeLists2(Rest02,Clist0++[#'EXTENSIONMARK'{pos=L1}]);
-parse_AlternativeTypeLists([{'...',L1}|Rest02],Clist0) ->
- parse_AlternativeTypeLists2(Rest02,Clist0++[#'EXTENSIONMARK'{pos=L1}]);
-parse_AlternativeTypeLists(Tokens = [{'}',_L1}|_Rest02],Clist0) ->
- {Clist0,Tokens}.
-
-parse_AlternativeTypeLists2(Tokens,Clist) ->
- {ExtAdd,Rest} = parse_ExtensionAdditionAlternatives(Tokens,Clist),
- {Clist2,Rest2} = parse_OptionalExtensionMarker(Rest,lists:flatten(ExtAdd)),
- case Rest2 of
- [{',',_}|Rest3] ->
- {CompList,Rest4} = parse_AlternativeTypeList(Rest3,[]),
- {Clist2 ++ CompList,Rest4};
- _ ->
- {Clist2,Rest2}
- end.
-
-
-
-parse_AlternativeTypeList([{',',_},Id = {identifier,_,_}|Rest],Acc) when Acc =/= [] ->
- {AlternativeType,Rest2} = parse_NamedType([Id|Rest]),
- parse_AlternativeTypeList(Rest2,[AlternativeType|Acc]);
-parse_AlternativeTypeList(Tokens = [{'}',_}|_],Acc) ->
- {lists:reverse(Acc),Tokens};
-parse_AlternativeTypeList(Tokens = [{']',_},{']',_}|_],Acc) ->
- {lists:reverse(Acc),Tokens};
-parse_AlternativeTypeList(Tokens = [{',',_},{'...',_}|_],Acc) ->
- {lists:reverse(Acc),Tokens};
-parse_AlternativeTypeList(Tokens,[]) ->
- {AlternativeType,Rest} = parse_NamedType(Tokens),
- parse_AlternativeTypeList(Rest,[AlternativeType]);
-parse_AlternativeTypeList(Tokens,_) ->
- throw({asn1_error,
- {get_line(hd(Tokens)),get(asn1_module),
- [got,[get_token(hd(Tokens)),get_token(hd(tl(Tokens)))],
- expected,['}',', identifier']]}}).
-
-parse_ExtensionAdditionAlternatives(Tokens =[{',',_}|_],Clist) ->
- {ExtAddList,Rest2} = parse_ExtensionAdditionAlternativesList(Tokens,[]),
- {Clist++lists:flatten(ExtAddList),Rest2};
-parse_ExtensionAdditionAlternatives(Tokens,Clist) ->
- %% Empty
- {Clist,Tokens}.
+parse_ExtensionAdditionAlternatives([{',',_}|_]=Tokens0) ->
+ parse_ExtensionAdditionAlternativesList(Tokens0, []);
+parse_ExtensionAdditionAlternatives(Tokens) ->
+ {[],Tokens}.
-parse_ExtensionAdditionAlternativesList([{',',_},Id = {identifier,_,_}|Rest],Acc) ->
- {AlternativeType,Rest2} = parse_NamedType([Id|Rest]),
- parse_ExtensionAdditionAlternativesList(Rest2,[AlternativeType|Acc]);
-parse_ExtensionAdditionAlternativesList([{',',_},C1 = {'[',_},C2 = {'[',_}|Rest],Acc) ->
- {ExtAddGroup,Rest2} = parse_ExtensionAdditionAlternativesGroup([C1,C2|Rest],[]),
- parse_ExtensionAdditionAlternativesList(Rest2,[ExtAddGroup|Acc]);
-parse_ExtensionAdditionAlternativesList(Tokens = [{'}',_}|_],Acc) ->
- {lists:reverse(Acc),Tokens};
-parse_ExtensionAdditionAlternativesList(Tokens = [{',',_},{'...',_}|_],Acc) ->
- {lists:reverse(Acc),Tokens};
-parse_ExtensionAdditionAlternativesList(Tokens,_) ->
- throw({asn1_error,
- {get_line(hd(Tokens)),get(asn1_module),
- [got,[get_token(hd(Tokens)),get_token(hd(tl(Tokens)))],
- expected,['}',', identifier']]}}).
-
-
-parse_ExtensionAdditionAlternativesGroup([ {'[',_},{'[',_},_VsnNr = {number,_,Num},{':',_}|Rest],[]) ->
- parse_ExtensionAdditionAlternativesGroup2(Rest,Num);
-parse_ExtensionAdditionAlternativesGroup([ {'[',_},{'[',_}|Rest],[]) ->
- parse_ExtensionAdditionAlternativesGroup2(Rest,undefined);
-parse_ExtensionAdditionAlternativesGroup(Tokens,_) ->
- throw({asn1_error,
- {get_line(hd(Tokens)),get(asn1_module),
- [got,[get_token(hd(Tokens)),get_token(hd(tl(Tokens)))],
- expected,['[[']]}}).
-
-
-parse_ExtensionAdditionAlternativesGroup2(Tokens,Num) ->
- {CompTypeList,Rest} = parse_AlternativeTypeList(Tokens,[]),
- case Rest of
- [{']',_},{']',_}|Rest2] ->
- {[{'ExtensionAdditionGroup',Num}|CompTypeList] ++
- ['ExtensionAdditionGroupEnd'],Rest2};
+parse_ExtensionAdditionAlternativesList([{',',_}|Tokens1]=Tokens0, Acc) ->
+ try parse_ExtensionAdditionAlternative(Tokens1) of
+ {ExtAddAlt,Tokens2} ->
+ parse_ExtensionAdditionAlternativesList(Tokens2, [ExtAddAlt|Acc])
+ catch
+ throw:{asn1_error,_} ->
+ {lists:append(lists:reverse(Acc)),Tokens0}
+ end;
+parse_ExtensionAdditionAlternativesList(Tokens, Acc) ->
+ {lists:append(lists:reverse(Acc)),Tokens}.
+
+parse_ExtensionAdditionAlternative([#identifier{}|_]=Tokens0) ->
+ {NamedType,Tokens} = parse_NamedType(Tokens0),
+ {[NamedType],Tokens};
+parse_ExtensionAdditionAlternative([{'[',_},{'[',_}|Tokens0]) ->
+ Tokens2 = case Tokens0 of
+ [{number,_,_},{':',_}|Tokens1] -> Tokens1;
+ _ -> Tokens0
+ end,
+ {GroupList,Tokens3} = parse_AlternativeTypeList(Tokens2),
+ case Tokens3 of
+ [{']',_},{']',_}|Tokens] ->
+ {GroupList,Tokens};
_ ->
- throw({asn1_error,{get_line(hd(Rest)),get(asn1_module),
- [got,get_token(hd(Rest)),expected,[']]']]}})
- end.
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-%% parse_AlternativeTypeLists(Tokens,ExtensionDefault) ->
-%% {AltTypeList,Rest1} = parse_AlternativeTypeList(Tokens),
-%% {ExtensionAndException,Rest2} =
-%% case Rest1 of
-%% [{',',_},{'...',L1},{'!',_}|Rest12] ->
-%% {_,Rest13} = parse_ExceptionIdentification(Rest12),
-%% %% Exception info is currently thrown away
-%% {[#'EXTENSIONMARK'{pos=L1}],Rest13};
-%% [{',',_},{'...',L1}|Rest12] ->
-%% {[#'EXTENSIONMARK'{pos=L1}],Rest12};
-%% _ ->
-%% {[],Rest1}
-%% end,
-%% {AltTypeList2,Rest5} =
-%% case ExtensionAndException of
-%% [] ->
-%% {AltTypeList,Rest2};
-%% _ ->
-%% {ExtensionAddition,Rest3} =
-%% case Rest2 of
-%% [{',',_}|Rest23] ->
-%% parse_ExtensionAdditionAlternativeList(Rest23);
-%% _ ->
-%% {[],Rest2}
-%% end,
-%% {OptionalExtensionMarker,Rest4} =
-%% case Rest3 of
-%% [{',',_},{'...',L3}|Rest31] ->
-%% {[#'EXTENSIONMARK'{pos=L3}],Rest31};
-%% _ ->
-%% {[],Rest3}
-%% end,
-%% {AltTypeList ++ ExtensionAndException ++
-%% ExtensionAddition ++ OptionalExtensionMarker, Rest4}
-%% end,
-%% AltTypeList3 =
-%% case [X || X=#'EXTENSIONMARK'{} <- AltTypeList2] of
-%% [] when ExtensionDefault == 'IMPLIED' ->
-%% AltTypeList2 ++ [#'EXTENSIONMARK'{}];
-%% _ ->
-%% AltTypeList2
-%% end,
-%% {AltTypeList3,Rest5}.
-
-
-%% parse_AlternativeTypeList(Tokens) ->
-%% parse_AlternativeTypeList(Tokens,[]).
+ parse_error(Tokens3)
+ end;
+parse_ExtensionAdditionAlternative(Tokens) ->
+ parse_error(Tokens).
-%% parse_AlternativeTypeList(Tokens,Acc) ->
-%% {NamedType,Rest} = parse_NamedType(Tokens),
-%% case Rest of
-%% [{',',_},Id = {identifier,_,_}|Rest2] ->
-%% parse_AlternativeTypeList([Id|Rest2],[NamedType|Acc]);
-%% _ ->
-%% {lists:reverse([NamedType|Acc]),Rest}
-%% end.
+%%%
+%%% End of parsing of alternative type lists.
+%%%
-
-
-%% parse_ExtensionAdditionAlternativeList(Tokens) ->
-%% parse_ExtensionAdditionAlternativeList(Tokens,[]).
-
-%% parse_ExtensionAdditionAlternativeList([{'[[',_}|Rest],Acc) ->
-%% parse_ExtensionAdditionAlternativeList(Rest,Acc);
-%% parse_ExtensionAdditionAlternativeList(Tokens = [{identifier,_,_}|_Rest],Acc) ->
-%% {Element,Rest0} = parse_NamedType(Tokens);
-%% case Rest0 of
-%% [{',',_}|Rest01] ->
-%% parse_ExtensionAdditionAlternativeList(Rest01,[Element|Acc]);
-%% _ ->
-%% {lists:reverse([Element|Acc]),Rest0}
-%% end.
-
-%% parse_ExtensionAdditionAlternatives([{'[[',_}|Rest]) ->
-%% parse_ExtensionAdditionAlternatives(Rest,[]);
-%% parse_ExtensionAdditionAlternatives(Tokens) ->
-%% throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
-%% [got,get_token(hd(Tokens)),expected,'[[']}}).
-
-%% parse_ExtensionAdditionAlternatives([Id = {identifier,_,_}|Rest],Acc) ->
-%% {NamedType, Rest2} = parse_NamedType([Id|Rest]),
-%% case Rest2 of
-%% [{',',_}|Rest21] ->
-%% parse_ExtensionAdditionAlternatives(Rest21,[NamedType|Acc]);
-%% [{']]',_}|Rest21] ->
-%% {lists:reverse(Acc),Rest21};
-%% _ ->
-%% throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
-%% [got,get_token(hd(Rest2)),expected,[',',']]']]}})
-%% end.
-
-parse_NamedType([{identifier,L1,Idname}|Rest]) ->
+parse_NamedType([#identifier{pos=L1,val=Idname}|Rest]) ->
{Type,Rest2} = parse_Type(Rest),
{#'ComponentType'{pos=L1,name=Idname,typespec=Type,prop=mandatory},Rest2};
parse_NamedType(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,identifier]}}).
+ parse_error(Tokens).
+%%%
+%%% Parse component type lists for SEQUENCE and SET.
+%%%
parse_ComponentTypeLists(Tokens) ->
- parse_ComponentTypeLists(Tokens,[]).
+ parse_ComponentTypeLists(Tokens, []).
-parse_ComponentTypeLists(Tokens = [{identifier,_,_}|_Rest0],Clist) ->
- {CompList,Rest1} = parse_ComponentTypeList(Tokens,[]),
- parse_ComponentTypeLists(Rest1,Clist++CompList);
-parse_ComponentTypeLists(Tokens = [{'COMPONENTS',_},{'OF',_}|_Rest],Clist) ->
+parse_ComponentTypeLists([#identifier{}|_Rest0]=Tokens, Clist) ->
{CompList,Rest1} = parse_ComponentTypeList(Tokens,[]),
parse_ComponentTypeLists(Rest1,Clist++CompList);
-parse_ComponentTypeLists([{'...',L1},{'!',_}|Rest02],Clist0) ->
- {_,Rest03} = parse_ExceptionIdentification(Rest02),
- %% Exception info is currently thrown away
- parse_ComponentTypeLists2(Rest03,Clist0++[#'EXTENSIONMARK'{pos=L1}]);
+parse_ComponentTypeLists([{'COMPONENTS',_},{'OF',_}|_]=Tokens,Clist) ->
+ {CompList,Rest1} = parse_ComponentTypeList(Tokens, []),
+ parse_ComponentTypeLists(Rest1, Clist++CompList);
parse_ComponentTypeLists([{',',L1},{'...',_},{'!',_}|Rest02],Clist0) when Clist0 =/= []->
{_,Rest03} = parse_ExceptionIdentification(Rest02),
%% Exception info is currently thrown away
parse_ComponentTypeLists2(Rest03,Clist0++[#'EXTENSIONMARK'{pos=L1}]);
-
- parse_ComponentTypeLists([{',',_},{'...',L1}|Rest02],Clist0) when Clist0 =/= []->
+parse_ComponentTypeLists([{',',_},{'...',L1}|Rest02],Clist0) when Clist0 =/= []->
parse_ComponentTypeLists2(Rest02,Clist0++[#'EXTENSIONMARK'{pos=L1}]);
parse_ComponentTypeLists([{'...',L1}|Rest02],Clist0) ->
parse_ComponentTypeLists2(Rest02,Clist0++[#'EXTENSIONMARK'{pos=L1}]);
parse_ComponentTypeLists(Tokens = [{'}',_L1}|_Rest02],Clist0) ->
- {Clist0,Tokens}.
+ {Clist0,Tokens};
+parse_ComponentTypeLists(Tokens, _) ->
+ parse_error(Tokens).
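%% As an illustration (assumed input), a component list such as
%%
%%   SEQUENCE { a INTEGER, ..., b BOOLEAN }
%%
%% is returned as the #'ComponentType'{} record for a, then an
%% #'EXTENSIONMARK'{} record, then the #'ComponentType'{} record for b.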
parse_ComponentTypeLists2(Tokens,Clist) ->
{ExtAdd,Rest} = parse_ExtensionAdditions(Tokens,Clist),
@@ -2590,12 +1841,12 @@ parse_OptionalExtensionMarker(Tokens,Clist) ->
{Clist,Tokens}.
-parse_ComponentTypeList([{',',_},Id = {identifier,_,_}|Rest],Acc) when Acc =/= [] ->
- {ComponentType,Rest2} = parse_ComponentType([Id|Rest]),
- parse_ComponentTypeList(Rest2,[ComponentType|Acc]);
-parse_ComponentTypeList([{',',_},C1={'COMPONENTS',_},C2={'OF',_}|Rest],Acc) when Acc =/= [] ->
- {ComponentType,Rest2} = parse_ComponentType([C1,C2|Rest]),
- parse_ComponentTypeList(Rest2,[ComponentType|Acc]);
+parse_ComponentTypeList([{',',_}|[#identifier{}|_]=Tokens0], Acc) when Acc =/= [] ->
+ {ComponentType,Tokens} = parse_ComponentType(Tokens0),
+ parse_ComponentTypeList(Tokens, [ComponentType|Acc]);
+parse_ComponentTypeList([{',',_}|[{'COMPONENTS',_},{'OF',_}|_]=Tokens0], Acc) when Acc =/= [] ->
+ {ComponentType,Tokens} = parse_ComponentType(Tokens0),
+ parse_ComponentTypeList(Tokens, [ComponentType|Acc]);
parse_ComponentTypeList(Tokens = [{'}',_}|_],Acc) ->
{lists:reverse(Acc),Tokens};
parse_ComponentTypeList(Tokens = [{']',_},{']',_}|_],Acc) ->
@@ -2606,10 +1857,7 @@ parse_ComponentTypeList(Tokens,[]) ->
{ComponentType,Rest} = parse_ComponentType(Tokens),
parse_ComponentTypeList(Rest,[ComponentType]);
parse_ComponentTypeList(Tokens,_) ->
- throw({asn1_error,
- {get_line(hd(Tokens)),get(asn1_module),
- [got,[get_token(hd(Tokens)),get_token(hd(tl(Tokens)))],
- expected,['}',', identifier']]}}).
+ parse_error(Tokens).
parse_ExtensionAdditions(Tokens=[{',',_}|_],Clist) ->
{ExtAddList,Rest2} = parse_ExtensionAdditionList(Tokens,[]),
@@ -2618,46 +1866,36 @@ parse_ExtensionAdditions(Tokens,Clist) ->
%% Empty
{Clist,Tokens}.
-parse_ExtensionAdditionList([{',',_},Id = {identifier,_,_}|Rest],Acc) ->
- {ComponentType,Rest2} = parse_ComponentType([Id|Rest]),
- parse_ExtensionAdditionList(Rest2,[ComponentType|Acc]);
-parse_ExtensionAdditionList([{',',_},C1={'COMPONENTS',_},C2={'OF',_}|Rest],Acc) ->
- {ComponentType,Rest2} = parse_ComponentType([C1,C2|Rest]),
- parse_ExtensionAdditionList(Rest2,[ComponentType|Acc]);
-parse_ExtensionAdditionList([{',',_},C1 = {'[',_},C2 = {'[',_}|Rest],Acc) ->
- {ExtAddGroup,Rest2} = parse_ExtensionAdditionGroup([C1,C2|Rest],[]),
+parse_ExtensionAdditionList([{',',_}|[#identifier{}|_]=Tokens0], Acc) ->
+ {ComponentType,Tokens} = parse_ComponentType(Tokens0),
+ parse_ExtensionAdditionList(Tokens, [ComponentType|Acc]);
+parse_ExtensionAdditionList([{',',_}|[{'COMPONENTS',_},{'OF',_}|_]=Tokens0], Acc) ->
+ {ComponentType,Tokens} = parse_ComponentType(Tokens0),
+ parse_ExtensionAdditionList(Tokens, [ComponentType|Acc]);
+parse_ExtensionAdditionList([{',',_},{'[',_},{'[',_}|Tokens], Acc) ->
+ {ExtAddGroup,Rest2} = parse_ExtensionAdditionGroup(Tokens),
parse_ExtensionAdditionList(Rest2,[ExtAddGroup|Acc]);
-parse_ExtensionAdditionList(Tokens = [{'}',_}|_],Acc) ->
+parse_ExtensionAdditionList([{'}',_}|_]=Tokens, Acc) ->
{lists:reverse(Acc),Tokens};
-parse_ExtensionAdditionList(Tokens = [{',',_},{'...',_}|_],Acc) ->
+parse_ExtensionAdditionList([{',',_},{'...',_}|_]=Tokens, Acc) ->
{lists:reverse(Acc),Tokens};
-parse_ExtensionAdditionList(Tokens,_) ->
- throw({asn1_error,
- {get_line(hd(Tokens)),get(asn1_module),
- [got,[get_token(hd(Tokens)),get_token(hd(tl(Tokens)))],
- expected,['}',', identifier']]}}).
-
+parse_ExtensionAdditionList(Tokens, _) ->
+ parse_error(Tokens).
-parse_ExtensionAdditionGroup([ {'[',_},{'[',_},_VsnNr = {number,_,Num},{':',_}|Rest],[]) ->
- parse_ExtensionAdditionGroup2(Rest,Num);
-parse_ExtensionAdditionGroup([ {'[',_},{'[',_}|Rest],[]) ->
- parse_ExtensionAdditionGroup2(Rest,undefined);
-parse_ExtensionAdditionGroup(Tokens,_) ->
- throw({asn1_error,
- {get_line(hd(Tokens)),get(asn1_module),
- [got,[get_token(hd(Tokens)),get_token(hd(tl(Tokens)))],
- expected,['[[']]}}).
+parse_ExtensionAdditionGroup([{number,_,Num},{':',_}|Tokens]) ->
+ parse_ExtensionAdditionGroup2(Tokens, Num);
+parse_ExtensionAdditionGroup(Tokens) ->
+ parse_ExtensionAdditionGroup2(Tokens, undefined).
-parse_ExtensionAdditionGroup2(Tokens,Num) ->
+parse_ExtensionAdditionGroup2(Tokens, Num) ->
{CompTypeList,Rest} = parse_ComponentTypeList(Tokens,[]),
case Rest of
[{']',_},{']',_}|Rest2] ->
{[{'ExtensionAdditionGroup',Num}|CompTypeList] ++
['ExtensionAdditionGroupEnd'],Rest2};
_ ->
- throw({asn1_error,{get_line(hd(Rest)),get(asn1_module),
- [got,get_token(hd(Rest)),expected,[']]']]}})
+ parse_error(Rest)
end.
@@ -2676,83 +1914,81 @@ parse_ComponentType(Tokens) ->
Result
end.
-
+%%%
+%%% Parse ENUMERATED.
+%%%
-parse_SignedNumber([{number,_,Value}|Rest]) ->
- {Value,Rest};
-parse_SignedNumber([{'-',_},{number,_,Value}|Rest]) ->
- {-Value,Rest};
-parse_SignedNumber(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- [number,'-number']]}}).
-
-parse_Enumerations(Tokens=[{identifier,_,_}|_Rest],ExtensionDefault) ->
- parse_Enumerations(Tokens,[],ExtensionDefault);
-parse_Enumerations([H|_T],_) ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,identifier]}}).
-
-parse_Enumerations(Tokens = [{identifier,_,_},{'(',_}|_Rest], Acc, ExtensionDefault) ->
- {NamedNumber,Rest2} = parse_NamedNumber(Tokens),
- case Rest2 of
- [{',',_}|Rest3] ->
- parse_Enumerations(Rest3,[NamedNumber|Acc], ExtensionDefault);
- _ when ExtensionDefault == 'IMPLIED'->
- {lists:reverse(['EXTENSIONMARK',NamedNumber|Acc]),Rest2};
+parse_Enumerations(Tokens0) ->
+ {Root,Tokens1} = parse_Enumeration(Tokens0),
+ case Tokens1 of
+ [{',',_},{'...',_},{',',_}|Tokens2] ->
+ {Ext,Tokens} = parse_Enumeration(Tokens2),
+ {Root++['EXTENSIONMARK'|Ext],Tokens};
+ [{',',_},{'...',_}|Tokens] ->
+ {Root++['EXTENSIONMARK'],Tokens};
_ ->
- {lists:reverse([NamedNumber|Acc]),Rest2}
- end;
-parse_Enumerations([{identifier,_,Id}|Rest], Acc, ExtensionDefault) ->
- case Rest of
- [{',',_}|Rest2] ->
- parse_Enumerations(Rest2,[Id|Acc], ExtensionDefault);
- _ when ExtensionDefault == 'IMPLIED' ->
- {lists:reverse(['EXTENSIONMARK', Id |Acc]),Rest};
- _ ->
- {lists:reverse([Id|Acc]),Rest}
- end;
-parse_Enumerations([{'...',_}|Rest], Acc, _ExtensionDefault) ->
- case Rest of
- [{',',_}|Rest2] ->
- parse_Enumerations(Rest2,['EXTENSIONMARK'|Acc],undefined);
- _ ->
- {lists:reverse(['EXTENSIONMARK'|Acc]),Rest}
+ case get(extensiondefault) of
+ 'IMPLIED' ->
+ {Root++['EXTENSIONMARK'],Tokens1};
+ _ ->
+ {Root,Tokens1}
+ end
+ end.
+
+parse_Enumeration(Tokens0) ->
+ {Item,Tokens} = parse_EnumerationItem(Tokens0),
+ parse_Enumeration_1(Tokens, [Item]).
+
+parse_Enumeration_1([{',',_}|Tokens1]=Tokens0, Acc) ->
+ try parse_EnumerationItem(Tokens1) of
+ {Item,Tokens} ->
+ parse_Enumeration_1(Tokens, [Item|Acc])
+ catch
+ throw:{asn1_error,_} ->
+ {lists:reverse(Acc),Tokens0}
end;
-parse_Enumerations([H|_T],_,_) ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,identifier]}}).
+parse_Enumeration_1(Tokens, Acc) ->
+ {lists:reverse(Acc),Tokens}.
+
+parse_EnumerationItem([#identifier{},{'(',_}|_]=Tokens) ->
+ parse_NamedNumber(Tokens);
+parse_EnumerationItem([#identifier{val=Id}|Tokens]) ->
+ {Id,Tokens};
+parse_EnumerationItem(Tokens) ->
+ parse_error(Tokens).
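%% For example (illustrative, assuming identifier tokens carry their
%% names as atoms):
%%
%%   ENUMERATED { red(0), green, ..., blue }
%%
%% is returned by parse_Enumerations/1 as
%%
%%   [{'NamedNumber',red,0},green,'EXTENSIONMARK',blue]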
+
+%%%
+%%% End of parsing of ENUMERATED.
+%%%
parse_NamedNumberList(Tokens) ->
- parse_NamedNumberList(Tokens,[]).
+ parse_NamedNumberList(Tokens, []).
-parse_NamedNumberList(Tokens,Acc) ->
+parse_NamedNumberList(Tokens, Acc) ->
{NamedNum,Rest} = parse_NamedNumber(Tokens),
case Rest of
[{',',_}|Rest2] ->
parse_NamedNumberList(Rest2,[NamedNum|Acc]);
_ ->
- {lists:reverse([NamedNum|Acc]),Rest}
+ {lists:reverse(Acc, [NamedNum]),Rest}
end.
-parse_NamedNumber([{identifier,_,Name},{'(',_}|Rest]) ->
+parse_NamedNumber([#identifier{val=Name},{'(',_}|Rest]) ->
Flist = [fun parse_SignedNumber/1,
fun parse_DefinedValue/1],
- case (catch parse_or(Rest,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
+ case parse_or(Rest, Flist) of
{NamedNum,[{')',_}|Rest2]} ->
{{'NamedNumber',Name,NamedNum},Rest2};
_ ->
- throw({asn1_error,{get_line(hd(Rest)),get(asn1_module),
- [got,get_token(hd(Rest)),expected,'NamedNumberList']}})
+ parse_error(Rest)
end;
parse_NamedNumber(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,identifier]}}).
+ parse_error(Tokens).
+parse_SignedNumber([{number,_,Value}|Rest]) ->
+ {Value,Rest};
+parse_SignedNumber(Tokens) ->
+ parse_error(Tokens).
parse_Tag([{'[',_}|Rest]) ->
{Class,Rest2} = parse_Class(Rest),
@@ -2767,12 +2003,8 @@ parse_Tag([{'[',_}|Rest]) ->
[{']',_}|Rest4] ->
{#tag{class=Class,number=ClassNumber},Rest4};
_ ->
- throw({asn1_error,{get_line(hd(Rest3)),get(asn1_module),
- [got,get_token(hd(Rest3)),expected,']']}})
- end;
-parse_Tag(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,'[']}}).
+ parse_error(Rest3)
+ end.
parse_Class([{'UNIVERSAL',_}|Rest]) ->
{'UNIVERSAL',Rest};
@@ -2791,15 +2023,7 @@ parse_Value(Tokens) ->
Flist = [fun parse_BuiltinValue/1,
fun parse_ValueFromObject/1,
fun parse_DefinedValue/1],
-
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
+ parse_or(Tokens, Flist).
parse_BuiltinValue([{bstring,_,Bstr}|Rest]) ->
{{bstring,Bstr},Rest};
@@ -2812,18 +2036,11 @@ parse_BuiltinValue(Tokens = [{'{',_}|_Rest]) ->
fun parse_SequenceOfValue/1,
fun parse_SequenceValue/1,
fun parse_ObjectIdentifierValue/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end;
-parse_BuiltinValue([{identifier,_,IdName},{':',_}|Rest]) ->
+ parse_or(Tokens, Flist);
+parse_BuiltinValue([#identifier{val=IdName},{':',_}|Rest]) ->
{Value,Rest2} = parse_Value(Rest),
{{'CHOICE',{IdName,Value}},Rest2};
-parse_BuiltinValue(Tokens=[{'NULL',_},{':',_}|_Rest]) ->
+parse_BuiltinValue([{'NULL',_},{':',_}|_]=Tokens) ->
parse_ObjectClassFieldValue(Tokens);
parse_BuiltinValue([{'NULL',_}|Rest]) ->
{'NULL',Rest};
@@ -2839,31 +2056,29 @@ parse_BuiltinValue([{cstring,_,Cstr}|Rest]) ->
{Cstr,Rest};
parse_BuiltinValue([{number,_,Num}|Rest]) ->
{Num,Rest};
-parse_BuiltinValue([{'-',_},{number,_,Num}|Rest]) ->
- {- Num,Rest};
parse_BuiltinValue(Tokens) ->
parse_ObjectClassFieldValue(Tokens).
-parse_DefinedValue(Tokens=[{identifier,_,_},{'{',_}|_Rest]) ->
- parse_ParameterizedValue(Tokens);
-%% Externalvaluereference
-parse_DefinedValue([{typereference,L1,Tname},{'.',_},{identifier,_,Idname}|Rest]) ->
+parse_DefinedValue(Tokens) ->
+ Flist = [fun parse_ParameterizedValue/1,
+ fun parse_DefinedValue2/1],
+ parse_or(Tokens, Flist).
+
+parse_DefinedValue2([{typereference,L1,Tname},
+ {'.',_},
+ #identifier{val=Idname}|Rest]) ->
{#'Externalvaluereference'{pos=L1,module=Tname,value=Idname},Rest};
%% valuereference
-parse_DefinedValue([Id = {identifier,_,_}|Rest]) ->
+parse_DefinedValue2([#identifier{}=Id|Rest]) ->
{identifier2Extvalueref(Id),Rest};
-%% ParameterizedValue
-parse_DefinedValue(Tokens) ->
- parse_ParameterizedValue(Tokens).
+parse_DefinedValue2(Tokens) ->
+ parse_error(Tokens).
parse_SequenceValue([{'{',_}|Tokens]) ->
- parse_SequenceValue(Tokens,[]);
-parse_SequenceValue(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,'{']}}).
+ parse_SequenceValue(Tokens, []).
-parse_SequenceValue([{identifier,Pos,IdName}|Rest],Acc) ->
+parse_SequenceValue([#identifier{pos=Pos,val=IdName}|Rest],Acc) ->
{Value,Rest2} = parse_Value(Rest),
SeqTag = #seqtag{pos=Pos,module=get(asn1_module),val=IdName},
case Rest2 of
@@ -2872,18 +2087,13 @@ parse_SequenceValue([{identifier,Pos,IdName}|Rest],Acc) ->
[{'}',_}|Rest3] ->
{lists:reverse(Acc, [{SeqTag,Value}]),Rest3};
_ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'}']}})
+ parse_error(Rest2)
end;
parse_SequenceValue(Tokens,_Acc) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,identifier]}}).
+ parse_error(Tokens).
parse_SequenceOfValue([{'{',_}|Tokens]) ->
- parse_SequenceOfValue(Tokens,[]);
-parse_SequenceOfValue(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,'{']}}).
+ parse_SequenceOfValue(Tokens, []).
parse_SequenceOfValue(Tokens,Acc) ->
{Value,Rest2} = parse_Value(Tokens),
@@ -2891,10 +2101,9 @@ parse_SequenceOfValue(Tokens,Acc) ->
[{',',_}|Rest3] ->
parse_SequenceOfValue(Rest3,[Value|Acc]);
[{'}',_}|Rest3] ->
- {lists:reverse([Value|Acc]),Rest3};
+ {lists:reverse(Acc, [Value]),Rest3};
_ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'}']}})
+ parse_error(Rest2)
end.
parse_ValueSetTypeAssignment([{typereference,L1,Name}|Rest]) ->
@@ -2904,49 +2113,31 @@ parse_ValueSetTypeAssignment([{typereference,L1,Name}|Rest]) ->
{ValueSet,Rest4} = parse_ValueSet(Rest3),
{#valuedef{pos=L1,name=Name,type=Type,value=ValueSet,
module=get(asn1_module)},Rest4};
- [H|_T] ->
- throw({asn1_error,{get_line(L1),get(asn1_module),
- [got,get_token(H),expected,'::=']}})
- end;
-parse_ValueSetTypeAssignment(Tokens) ->
- throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- typereference]}}).
+ _ ->
+ parse_error(Rest2)
+ end.
parse_ValueSet([{'{',_}|Rest]) ->
{Elems,Rest2} = parse_ElementSetSpecs(Rest),
case Rest2 of
[{'}',_}|Rest3] ->
{{valueset,Elems},Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'}']}})
+ _ ->
+ parse_error(Rest2)
end;
parse_ValueSet(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,'{']}}).
+ parse_error(Tokens).
-parse_ValueAssignment([{identifier,L1,IdName}|Rest]) ->
+parse_ValueAssignment([#identifier{pos=L1,val=IdName}|Rest]) ->
{Type,Rest2} = parse_Type(Rest),
case Rest2 of
[{'::=',_}|Rest3] ->
{Value,Rest4} = parse_Value(Rest3),
- case catch lookahead_assignment(Rest4) of
- ok ->
- {#valuedef{pos=L1,name=IdName,type=Type,value=Value,
- module=get(asn1_module)},Rest4};
- Error ->
- throw(Error)
-%% throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
-%% [got,get_token(hd(Rest2)),expected,'::=']}})
- end;
+ {#valuedef{pos=L1,name=IdName,type=Type,value=Value,
+ module=get(asn1_module)},Rest4};
_ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'::=']}})
- end;
-parse_ValueAssignment(Tokens) ->
- throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,identifier]}}).
+ parse_error(Rest2)
+ end.
%% SizeConstraint
parse_SubtypeElements([{'SIZE',_}|Tokens]) ->
@@ -2966,8 +2157,7 @@ parse_SubtypeElements([{'WITH',_},{'COMPONENTS',_},{'{',_},{'...',_},{',',_}|Tok
[{'}',_}|Rest2] ->
{{'WITH COMPONENTS',{'PartialSpecification',Constraint}},Rest2};
_ ->
- throw({asn1_error,{get_line(hd(Rest)),get(asn1_module),
- [got,get_token(hd(Rest)),expected,'}']}})
+ parse_error(Rest)
end;
parse_SubtypeElements([{'WITH',_},{'COMPONENTS',_},{'{',_}|Tokens]) ->
{Constraint,Rest} = parse_TypeConstraints(Tokens),
@@ -2975,28 +2165,18 @@ parse_SubtypeElements([{'WITH',_},{'COMPONENTS',_},{'{',_}|Tokens]) ->
[{'}',_}|Rest2] ->
{{'WITH COMPONENTS',{'FullSpecification',Constraint}},Rest2};
_ ->
- throw({asn1_error,{get_line(hd(Rest)),get(asn1_module),
- [got,get_token(hd(Rest)),expected,'}']}})
+ parse_error(Rest)
end;
parse_SubtypeElements([{'PATTERN',_}|Tokens]) ->
{Value,Rest} = parse_Value(Tokens),
{{pattern,Value},Rest};
-%% SingleValue
-%% ContainedSubtype
-%% ValueRange
-%% TypeConstraint
-%% Moved fun parse_Value/1 and fun parse_Type/1 to parse_Elements
parse_SubtypeElements(Tokens) ->
Flist = [fun parse_ContainedSubtype/1,
fun parse_Value/1,
- fun([{'MIN',_}|T]) -> {'MIN',T} end,
+ fun parse_MIN/1,
fun parse_Type/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- {asn1_error,Reason} ->
- throw(Reason);
- Result = {Val,_} when is_record(Val,type) ->
+ case parse_or(Tokens, Flist) of
+ {#type{},_}=Result ->
Result;
{Lower,[{'..',_}|Rest]} ->
{Upper,Rest2} = parse_UpperEndpoint(Rest),
@@ -3014,10 +2194,7 @@ parse_ContainedSubtype([{'INCLUDES',_}|Rest]) ->
{Type,Rest2} = parse_Type(Rest),
{{'ContainedSubtype',Type},Rest2};
parse_ContainedSubtype(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,'INCLUDES']}}).
-%%parse_ContainedSubtype(Tokens) -> %this option is moved to parse_SubtypeElements
-%% parse_Type(Tokens).
+ parse_error(Tokens).
parse_UpperEndpoint([{'<',_}|Rest]) ->
parse_UpperEndpoint(lt,Rest);
@@ -3025,33 +2202,38 @@ parse_UpperEndpoint(Tokens) ->
parse_UpperEndpoint(false,Tokens).
parse_UpperEndpoint(Lt,Tokens) ->
- Flist = [ fun([{'MAX',_}|T]) -> {'MAX',T} end,
- fun parse_Value/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- {Value,Rest2} when Lt == lt ->
+ Flist = [fun parse_MAX/1,
+ fun parse_Value/1],
+ case parse_or(Tokens, Flist) of
+ {Value,Rest2} when Lt =:= lt ->
{{lt,Value},Rest2};
{Value,Rest2} ->
{Value,Rest2}
end.
+parse_MIN([{'MIN',_}|T]) ->
+ {'MIN',T};
+parse_MIN(Tokens) ->
+ parse_error(Tokens).
+
+parse_MAX([{'MAX',_}|T]) ->
+ {'MAX',T};
+parse_MAX(Tokens) ->
+ parse_error(Tokens).
+
parse_TypeConstraints(Tokens) ->
- parse_TypeConstraints(Tokens,[]).
+ parse_TypeConstraints(Tokens, []).
-parse_TypeConstraints([{identifier,_,_}|Rest],Acc) ->
+parse_TypeConstraints([#identifier{}|Rest], Acc) ->
{ComponentConstraint,Rest2} = parse_ComponentConstraint(Rest),
case Rest2 of
[{',',_}|Rest3] ->
- parse_TypeConstraints(Rest3,[ComponentConstraint|Acc]);
+ parse_TypeConstraints(Rest3, [ComponentConstraint|Acc]);
_ ->
- {lists:reverse([ComponentConstraint|Acc]),Rest2}
+ {lists:reverse(Acc, [ComponentConstraint]),Rest2}
end;
-parse_TypeConstraints([H|_T],_) ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,identifier]}}).
+parse_TypeConstraints(Tokens, _) ->
+ parse_error(Tokens).
parse_ComponentConstraint(Tokens = [{'(',_}|_Rest]) ->
{ValueConstraint,Rest2} = parse_Constraint(Tokens),
@@ -3071,145 +2253,36 @@ parse_PresenceConstraint(Tokens) ->
{asn1_empty,Tokens}.
-% merge_constraints({Rlist,ExtList}) -> % extensionmarker in constraint
-% {merge_constraints(Rlist,[],[]),
-% merge_constraints(ExtList,[],[])};
-
-%% An arg with a constraint with extension marker will look like
-%% [#constraint{c={Root,Ext}}|Rest]
-
merge_constraints(Clist) ->
merge_constraints(Clist, [], []).
-merge_constraints([Ch|Ct],Cacc, Eacc) ->
- NewEacc = case Ch#constraint.e of
- undefined -> Eacc;
- E -> [E|Eacc]
- end,
- merge_constraints(Ct,[fixup_constraint(Ch#constraint.c)|Cacc],NewEacc);
-
-merge_constraints([],Cacc,[]) ->
-%% lists:flatten(Cacc);
+merge_constraints([#constraint{c=C,e=E}|T], Cacc0, Eacc0) ->
+ Eacc = case E of
+ undefined -> Eacc0;
+ E -> [E|Eacc0]
+ end,
+ Cacc = [C|Cacc0],
+ merge_constraints(T, Cacc, Eacc);
+merge_constraints([], Cacc, []) ->
lists:reverse(Cacc);
-merge_constraints([],Cacc,Eacc) ->
-%% lists:flatten(Cacc) ++ [{'Errors',Eacc}].
- lists:reverse(Cacc) ++ [{'Errors',Eacc}].
-
-
-fixup_constraint(C) ->
- case C of
- {'SingleValue',SubType} when element(1,SubType) == 'ContainedSubtype' ->
- SubType;
- {'SingleValue',V} when is_list(V) ->
- C;
- %% [C,{'ValueRange',{lists:min(V),lists:max(V)}}];
- %% bug, turns wrong when an element in V is a reference to a defined value
- {'PermittedAlphabet',{'SingleValue',V}} when is_list(V) ->
- %%sort and remove duplicates
- V2 = {'SingleValue',
- ordsets:from_list(lists:flatten(V))},
- {'PermittedAlphabet',V2};
- {'PermittedAlphabet',{'SingleValue',V}} ->
- V2 = {'SingleValue',[V]},
- {'PermittedAlphabet',V2};
- {'SizeConstraint',Sc} ->
- {'SizeConstraint',fixup_size_constraint(Sc)};
-
- List when is_list(List) -> %% In This case maybe a union or intersection
- [fixup_constraint(Xc)||Xc <- List];
- Other ->
- Other
- end.
+merge_constraints([], Cacc, Eacc) ->
+ lists:reverse(Cacc) ++ [{element_set,{'Errors',Eacc},none}].
-fixup_size_constraint({'ValueRange',{Lb,Ub}}) ->
- {Lb,Ub};
-fixup_size_constraint({{'ValueRange',R},[]}) ->
- {R,[]};
-fixup_size_constraint({[],{'ValueRange',R}}) ->
- {[],R};
-fixup_size_constraint({{'ValueRange',R1},{'ValueRange',R2}}) ->
- {R1,R2};
-fixup_size_constraint({'SingleValue',[Sv]}) ->
- fixup_size_constraint({'SingleValue',Sv});
-fixup_size_constraint({'SingleValue',L}) when is_list(L) ->
- ordsets:from_list(L);
-fixup_size_constraint({'SingleValue',L}) ->
- {L,L};
-fixup_size_constraint({'SizeConstraint',C}) ->
- %% this is a second SIZE
- fixup_size_constraint(C);
-fixup_size_constraint({C1,C2}) ->
- %% this is with extension marks
- {turn2vr(fixup_size_constraint(C1)), extension_size(fixup_size_constraint(C2))};
-fixup_size_constraint(CList) when is_list(CList) ->
- [fixup_constraint(Xc)||Xc <- CList].
-
-turn2vr(L) when is_list(L) ->
- L2 =[X||X<-ordsets:from_list(L),is_integer(X)],
- case L2 of
- [H|_] ->
- {H,hd(lists:reverse(L2))};
- _ ->
- L
- end;
-turn2vr(VR) ->
- VR.
-extension_size({I,I}) ->
- [I];
-extension_size({I1,I2}) ->
- [I1,I2];
-extension_size(C) ->
- C.
-
-get_line({_,Pos,Token}) when is_integer(Pos),is_atom(Token) ->
+get_line({Token,Pos,_}) when is_integer(Pos), is_atom(Token) ->
Pos;
get_line({Token,Pos}) when is_integer(Pos),is_atom(Token) ->
- Pos;
-get_line(_) ->
- undefined.
-
-get_token({_,Pos,Token}) when is_integer(Pos),is_atom(Token) ->
- Token;
+ Pos.
+
+get_token({valuefieldreference,_,FieldName}) ->
+ list_to_atom([$&|atom_to_list(FieldName)]);
+get_token({typefieldreference,_,FieldName}) ->
+ list_to_atom([$&|atom_to_list(FieldName)]);
+get_token({Token,Pos,Value}) when is_integer(Pos), is_atom(Token) ->
+ Value;
get_token({'$end',Pos}) when is_integer(Pos) ->
- undefined;
+ 'END-OF-FILE';
get_token({Token,Pos}) when is_integer(Pos),is_atom(Token) ->
- Token;
-get_token(_) ->
- undefined.
-
-prioritize_error(ErrList) ->
- case lists:keymember(asn1_error,1,ErrList) of
- false -> % only asn1_assignment_error -> take the last
- lists:last(ErrList);
- true -> % contains errors from deeper in a Type
- NewErrList = [_Err={_,_}|_RestErr] =
- lists:filter(fun({asn1_error,_})->true;(_)->false end,
- ErrList),
- SplitErrs =
- lists:splitwith(fun({_,X})->
- case element(1,X) of
- Int when is_integer(Int) -> true;
- _ -> false
- end
- end,
- NewErrList),
- case SplitErrs of
- {[],UndefPosErrs} -> % if no error with Positon exists
- lists:last(UndefPosErrs);
- {IntPosErrs,_} ->
- IntPosReasons = lists:map(fun(X)->element(2,X) end,IntPosErrs),
- SortedReasons = lists:keysort(1,IntPosReasons),
- {asn1_error,lists:last(SortedReasons)}
- end
- end.
-
-%% most_prio_error([H={_,Reason}|T],Atom,Err) when is_atom(Atom) ->
-%% most_prio_error(T,element(1,Reason),H);
-%% most_prio_error([H={_,Reason}|T],Greatest,Err) ->
-%% case element(1,Reason) of
-%% Pos when is_integer(Pos),Pos>Greatest ->
-%% most_prio_error(
-
+ Token.
tref2Exttref(#typereference{pos=Pos,val=Name}) ->
#'Externaltypereference'{pos=Pos,
@@ -3226,19 +2299,5 @@ identifier2Extvalueref(#identifier{pos=Pos,val=Name}) ->
module=resolve_module(Name),
value=Name}.
-%% lookahead_assignment/1 checks that the next sequence of tokens
-%% in Token contain a valid assignment or the
-%% 'END' token. Otherwise an exception is thrown.
-lookahead_assignment([{'END',_}|_Rest]) ->
- ok;
-lookahead_assignment(Tokens) ->
- parse_Assignment(Tokens),
- ok.
-
-is_pre_defined_class('TYPE-IDENTIFIER') ->
- true;
-is_pre_defined_class('ABSTRACT-SYNTAX') ->
- true;
-is_pre_defined_class(_) ->
- false.
-
+parse_error(Tokens) ->
+ throw({asn1_error,{parse_error,Tokens}}).
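All syntax failures in the parser are now funnelled through the single parse_error/1 helper above, which throws {asn1_error,{parse_error,Tokens}} with the unconsumed token list. A minimal, hypothetical sketch of how calling code could catch that term (parse_error_demo and report/1 are illustrative names, not part of the patch; it assumes tokens keep the {Name,Line}/{Name,Line,Value} shapes produced by asn1ct_tok):

-module(parse_error_demo).
-export([report/1]).

%% Run a zero-arity parsing fun and convert the throw from parse_error/1
%% into an {error,{Line,Reason}} tuple. The line number is the second
%% element of every token tuple.
report(ParseFun) when is_function(ParseFun, 0) ->
    try
        {ok,ParseFun()}
    catch
        throw:{asn1_error,{parse_error,[Tok|_]}} ->
            {error,{element(2, Tok),unexpected_token}};
        throw:{asn1_error,{parse_error,[]}} ->
            {error,{0,unexpected_end_of_input}}
    end.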
diff --git a/lib/asn1/src/asn1ct_tok.erl b/lib/asn1/src/asn1ct_tok.erl
index 8687ed955c..d51fea6402 100644
--- a/lib/asn1/src/asn1ct_tok.erl
+++ b/lib/asn1/src/asn1ct_tok.erl
@@ -21,191 +21,177 @@
%% Tokenize ASN.1 code (input to parser generated with yecc)
--export([get_name/2,tokenise/4, file/1]).
+-export([file/1,format_error/1]).
-
-file(File) ->
- case file:open(File, [read]) of
+file(File0) ->
+ case file:open(File0, [read]) of
{error, Reason} ->
- {error,{File,file:format_error(Reason)}};
+ {error,{File0,file:format_error(Reason)}};
{ok,Stream} ->
- process(Stream,0,[])
+ try
+ process(Stream, 1, [])
+ catch
+ throw:{error,Line,Reason} ->
+ File = filename:basename(File0),
+ Error = {structured_error,{File,Line},?MODULE,Reason},
+ {error,[Error]}
+ end
end.
-process(Stream,Lno,R) ->
- process(io:get_line(Stream, ''), Stream,Lno+1,R).
+process(Stream, Lno, R) ->
+ process(io:get_line(Stream, ''), Stream, Lno, R).
-process(eof, Stream,Lno,R) ->
+process(eof, Stream, Lno, Acc) ->
ok = file:close(Stream),
- lists:flatten(lists:reverse([{'$end',Lno}|R]));
-
-
-process(L, Stream,Lno,R) when is_list(L) ->
- %%io:format('read:~s',[L]),
- case catch tokenise(Stream,L,Lno,[]) of
- {'ERR',Reason} ->
- io:format("Tokeniser error on line: ~w ~w~n",[Lno,Reason]),
- exit(0);
- {NewLno,T} ->
- %%io:format('toks:~w~n',[T]),
- process(Stream,NewLno,[T|R])
- end.
-
-tokenise(Stream,[H|T],Lno,R) when $a =< H , H =< $z ->
- {X, T1} = get_name(T, [H]),
- tokenise(Stream,T1,Lno,[{identifier,Lno, list_to_atom(X)}|R]);
-
-tokenise(Stream,[$&,H|T],Lno,R) when $A =< H , H =< $Z ->
- {Y, T1} = get_name(T, [H]),
- X = list_to_atom(Y),
- tokenise(Stream,T1,Lno,[{typefieldreference, Lno, X} | R]);
-
-tokenise(Stream,[$&,H|T],Lno,R) when $a =< H , H =< $z ->
- {Y, T1} = get_name(T, [H]),
- X = list_to_atom(Y),
- tokenise(Stream,T1,Lno,[{valuefieldreference, Lno, X} | R]);
-
-tokenise(Stream,[H|T],Lno,R) when $A =< H , H =< $Z ->
- {Y, T1} = get_name(T, [H]),
- X = list_to_atom(Y),
- case reserved_word(X) of
- true ->
- tokenise(Stream,T1,Lno,[{X,Lno}|R]);
- false ->
- tokenise(Stream,T1,Lno,[{typereference,Lno,X}|R]);
- rstrtype ->
- tokenise(Stream,T1,Lno,[{restrictedcharacterstringtype,Lno,X}|R])
- end;
-
-tokenise(Stream,[$-,H|T],Lno,R) when $0 =< H , H =< $9 ->
- {X, T1} = get_number(T, [H]),
- tokenise(Stream,T1,Lno,[{number,Lno,-1 * list_to_integer(X)}|R]);
+ lists:reverse([{'$end',Lno}|Acc]);
+process(L, Stream, Lno0, Acc) when is_list(L) ->
+ try tokenise(Stream, L, Lno0, []) of
+ {Lno,[]} ->
+ process(Stream, Lno, Acc);
+ {Lno,Ts} ->
+ process(Stream, Lno, Ts++Acc)
+ catch
+ throw:{error,Reason} ->
+ throw({error,Lno0,Reason})
+ end.
-tokenise(Stream,[H|T],Lno,R) when $0 =< H , H =< $9 ->
+format_error(eof_in_comment) ->
+ "premature end of file in multi-line comment";
+format_error(eol_in_token) ->
+ "end of line in token";
+format_error({invalid_binary_number,Str}) ->
+ io_lib:format("invalid binary number: '~s'", [Str]);
+format_error({invalid_hex_number,Str}) ->
+ io_lib:format("invalid hex number: '~s'", [Str]);
+format_error(Other) ->
+ io_lib:format("~p", [Other]).
+
+tokenise(Stream, [$&,H|T], Lno, R) when $A =< H , H =< $Z ->
+ {X,T1} = get_name(T, [H]),
+ tokenise(Stream, T1, Lno, [{typefieldreference,Lno,X}|R]);
+tokenise(Stream, [$&,H|T], Lno, R) when $a =< H , H =< $z ->
+ {X,T1} = get_name(T, [H]),
+ tokenise(Stream, T1, Lno, [{valuefieldreference,Lno,X}|R]);
+
+tokenise(Stream, "--"++T, Lno, R) ->
+ tokenise(Stream, skip_comment(T), Lno, R);
+
+tokenise(Stream, [$-,H|T], Lno, R) when $0 =< H , H =< $9 ->
{X, T1} = get_number(T, [H]),
- tokenise(Stream,T1,Lno,[{number,Lno,list_to_integer(X)}|R]);
-
-tokenise(Stream,[$-,$-|T],Lno,R) ->
- tokenise(Stream,skip_comment(T),Lno,R);
+ tokenise(Stream, T1, Lno, [{number,Lno,-list_to_integer(X)}|R]);
-tokenise(Stream,[$/,$*|T],Lno,R) ->
- {NewLno,T1} = skip_multiline_comment(Stream,T,Lno,0),
- tokenise(Stream,T1,NewLno,R);
+tokenise(Stream, "/*"++T, Lno0, R) ->
+ {Lno,T1} = skip_multiline_comment(Stream, T, Lno0, 0),
+ tokenise(Stream, T1, Lno, R);
-tokenise(Stream,[$:,$:,$=|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{'::=',Lno}|R]);
-
-tokenise(Stream,[$'|T],Lno,R) ->
- case catch collect_quoted(T,Lno,[]) of
- {'ERR',_} ->
- throw({'ERR','bad_quote'});
- {Thing, T1} ->
- tokenise(Stream,T1,Lno,[Thing|R])
- end;
+tokenise(Stream, "::="++T, Lno, R) ->
+ tokenise(Stream, T, Lno, [{'::=',Lno}|R]);
+tokenise(Stream, ":"++T, Lno, R) ->
+ tokenise(Stream, T, Lno, [{':',Lno}|R]);
+tokenise(Stream, "'"++T0, Lno, R) ->
+ {Thing, T1} = collect_quoted(T0, Lno, []),
+ tokenise(Stream, T1, Lno, [Thing|R]);
tokenise(Stream,[$"|T],Lno,R) ->
{Str,T1} = collect_string(T,Lno),
tokenise(Stream,T1,Lno,[Str|R]);
-tokenise(Stream,[${|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{'{',Lno}|R]);
-
-tokenise(Stream,[$}|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{'}',Lno}|R]);
-
-%% tokenise(Stream,[$],$]|T],Lno,R) ->
-%% tokenise(Stream,T,Lno,[{']]',Lno}|R]);
+tokenise(Stream, "{"++T, Lno, R) ->
+ tokenise(Stream, T, Lno, [{'{',Lno}|R]);
+tokenise(Stream, "}"++T, Lno, R) ->
+ tokenise(Stream, T, Lno, [{'}',Lno}|R]);
%% Even though x.680 specify '[[' and ']]' as lexical items
-%% it does not work to have them as such since the single [ and ] can
-%% be used beside each other in the SYNTAX OF in x.681
-%% the solution chosen here , i.e. to have them as separate lexical items
+%% it does not work to have them as such since the single '[' and ']' can
+%% be used beside each other in 'WITH SYNTAX' in x.681.
+%% The solution chosen here, i.e. to have them as separate lexical items
%% will not detect the cases where there is white space between them
-%% which would be an error in the use in ExtensionAdditionGroups
-
-%% tokenise(Stream,[$[,$[|T],Lno,R) ->
-%% tokenise(Stream,T,Lno,[{'[[',Lno}|R]);
-
-tokenise(Stream,[$]|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{']',Lno}|R]);
-
-tokenise(Stream,[$[|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{'[',Lno}|R]);
+%% which would be an error in the use in ExtensionAdditionGroups.
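%% For illustration only (not part of this patch): with the clauses below,
%% an extension addition group such as "[[ b BOOLEAN ]]" is emitted as six
%% separate tokens, roughly
%%   [{'[',L},{'[',L},{identifier,L,b},{'BOOLEAN',L},{']',L},{']',L}]
%% and pairing the brackets is left to the parser.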
-tokenise(Stream,[$,|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{',',Lno}|R]);
+tokenise(Stream, "]"++T, Lno, R) ->
+ tokenise(Stream, T, Lno, [{']',Lno}|R]);
+tokenise(Stream, "["++T,Lno,R) ->
+ tokenise(Stream, T, Lno, [{'[',Lno}|R]);
-tokenise(Stream,[$(|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{'(',Lno}|R]);
-tokenise(Stream,[$)|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{')',Lno}|R]);
+tokenise(Stream, ","++T,Lno,R) ->
+ tokenise(Stream, T, Lno, [{',',Lno}|R]);
-tokenise(Stream,[$.,$.,$.|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{'...',Lno}|R]);
+tokenise(Stream, "("++T, Lno, R) ->
+ tokenise(Stream, T, Lno, [{'(',Lno}|R]);
+tokenise(Stream, ")"++T, Lno, R) ->
+ tokenise(Stream, T, Lno, [{')',Lno}|R]);
-tokenise(Stream,[$.,$.|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{'..',Lno}|R]);
+tokenise(Stream, "..."++T,Lno,R) ->
+ tokenise(Stream, T, Lno, [{'...',Lno}|R]);
+tokenise(Stream, ".."++T, Lno, R) ->
+ tokenise(Stream, T, Lno, [{'..',Lno}|R]);
+tokenise(Stream, "."++T, Lno, R) ->
+ tokenise(Stream, T, Lno, [{'.',Lno}|R]);
-tokenise(Stream,[$.|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{'.',Lno}|R]);
-tokenise(Stream,[$^|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{'^',Lno}|R]);
-tokenise(Stream,[$!|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{'!',Lno}|R]);
-tokenise(Stream,[$||T],Lno,R) ->
- tokenise(Stream,T,Lno,[{'|',Lno}|R]);
+tokenise(Stream, "|"++T, Lno, R) ->
+ tokenise(Stream, T, Lno, [{'|',Lno}|R]);
-tokenise(Stream,[H|T],Lno,R) ->
- case white_space(H) of
+tokenise(Stream, [H|T], Lno, R) when $A =< H , H =< $Z ->
+ {X,T1} = get_name(T, [H]),
+ case reserved_word(X) of
true ->
- tokenise(Stream,T,Lno,R);
+ tokenise(Stream, T1, Lno, [{X,Lno}|R]);
false ->
- tokenise(Stream,T,Lno,[{list_to_atom([H]),Lno}|R])
+ tokenise(Stream, T1, Lno, [{typereference,Lno,X}|R]);
+ rstrtype ->
+ tokenise(Stream, T1, Lno, [{restrictedcharacterstringtype,Lno,X}|R])
end;
-tokenise(_Stream,[],Lno,R) ->
- {Lno,lists:reverse(R)}.
+tokenise(Stream, [H|T], Lno, R) when $a =< H , H =< $z ->
+ {X, T1} = get_name(T, [H]),
+ tokenise(Stream, T1, Lno, [{identifier,Lno,X}|R]);
-collect_string(L,Lno) ->
- collect_string(L,Lno,[]).
+tokenise(Stream, [H|T], Lno, R) when $0 =< H , H =< $9 ->
+ {X, T1} = get_number(T, [H]),
+ tokenise(Stream, T1, Lno, [{number,Lno,list_to_integer(X)}|R]);
-collect_string([],_,_) ->
- throw({'ERR','bad_quote found eof'});
+tokenise(Stream, [H|T], Lno, R) when H =< $\s ->
+ tokenise(Stream, T, Lno, R);
-collect_string([H|T],Lno,Str) ->
- case H of
- $" ->
- {{cstring,1,lists:reverse(Str)},T};
- Ch ->
- collect_string(T,Lno,[Ch|Str])
- end.
-
+tokenise(Stream, [H|T], Lno, R) ->
+ tokenise(Stream, T, Lno, [{list_to_atom([H]),Lno}|R]);
+tokenise(_Stream, [], Lno, R) ->
+ {Lno+1,R}.
-% <name> is letters digits hyphens
-% hypen is not the last character. Hypen hyphen is NOT allowed
-%
-% <identifier> ::= <lowercase> <name>
+collect_string(L, Lno) ->
+ collect_string(L, Lno, []).
-get_name([$-,Char|T], L) ->
+collect_string([$"|T], _Lno, Str) ->
+ {{cstring,1,lists:reverse(Str)},T};
+collect_string([H|T], Lno, Str) ->
+ collect_string(T, Lno, [H|Str]);
+collect_string([], _, _) ->
+ throw({error,missing_quote_at_eof}).
+
+%% <name> is letters digits hyphens.
+%% Hyphen is not the last character. Hyphen hyphen is NOT allowed.
+%%
+%% <identifier> ::= <lowercase> <name>
+
+get_name([$-,Char|T]=T0, Acc) ->
case isalnum(Char) of
true ->
- get_name(T,[Char,$-|L]);
+ get_name(T, [Char,$-|Acc]);
false ->
- {lists:reverse(L),[$-,Char|T]}
+ {list_to_atom(lists:reverse(Acc)),T0}
end;
-get_name([$-|T], L) ->
- {lists:reverse(L),[$-|T]};
-get_name([Char|T], L) ->
+get_name([$-|_]=T, Acc) ->
+ {list_to_atom(lists:reverse(Acc)),T};
+get_name([Char|T]=T0, Acc) ->
case isalnum(Char) of
true ->
- get_name(T,[Char|L]);
+ get_name(T, [Char|Acc]);
false ->
- {lists:reverse(L),[Char|T]}
+ {list_to_atom(lists:reverse(Acc)),T0}
end;
-get_name([], L) ->
- {lists:reverse(L), []}.
-
+get_name([], Acc) ->
+ {list_to_atom(lists:reverse(Acc)),[]}.
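%% For illustration only (not part of this patch): a double hyphen ends the
%% name, so get_name("bc--x", "a") returns {abc,"--x"}, while a single
%% embedded hyphen is kept: get_name("bc-x", "a") returns {'abc-x',[]}.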
isalnum(H) when $A =< H , H =< $Z ->
true;
@@ -221,67 +207,54 @@ isdigit(H) when $0 =< H , H =< $9 ->
isdigit(_) ->
false.
-white_space(9) -> true;
-white_space(10) -> true;
-white_space(13) -> true;
-white_space(32) -> true;
-white_space(_) -> false.
-
-
-get_number([H|T], L) ->
+get_number([H|T]=T0, L) ->
case isdigit(H) of
true ->
get_number(T, [H|L]);
false ->
- {lists:reverse(L), [H|T]}
+ {lists:reverse(L), T0}
end;
get_number([], L) ->
{lists:reverse(L), []}.
-skip_comment([]) ->
- [];
-skip_comment([$-,$-|T]) ->
- T;
-skip_comment([_|T]) ->
- skip_comment(T).
-
+skip_comment([]) -> [];
+skip_comment("--"++T) -> T;
+skip_comment([_|T]) -> skip_comment(T).
-skip_multiline_comment(Stream,[],Lno,Level) ->
- case io:get_line(Stream,'') of
+skip_multiline_comment(Stream, [], Lno, Level) ->
+ case io:get_line(Stream, '') of
eof ->
- io:format("Tokeniser error on line: ~w~n"
- "premature end of multiline comment~n",[Lno]),
- exit(0);
+ throw({error,eof_in_comment});
Line ->
- skip_multiline_comment(Stream,Line,Lno+1,Level)
+ skip_multiline_comment(Stream, Line, Lno+1, Level)
end;
-skip_multiline_comment(_Stream,[$*,$/|T],Lno,0) ->
+skip_multiline_comment(_Stream, "*/"++T, Lno, 0) ->
{Lno,T};
-skip_multiline_comment(Stream,[$*,$/|T],Lno,Level) ->
- skip_multiline_comment(Stream,T,Lno,Level - 1);
-skip_multiline_comment(Stream,[$/,$*|T],Lno,Level) ->
- skip_multiline_comment(Stream,T,Lno,Level + 1);
-skip_multiline_comment(Stream,[_|T],Lno,Level) ->
- skip_multiline_comment(Stream,T,Lno,Level).
-
-collect_quoted([$',$B|T],Lno, L) ->
+skip_multiline_comment(Stream, "*/"++T, Lno, Level) ->
+ skip_multiline_comment(Stream, T, Lno, Level - 1);
+skip_multiline_comment(Stream, "/*"++T, Lno, Level) ->
+ skip_multiline_comment(Stream, T, Lno, Level + 1);
+skip_multiline_comment(Stream, [_|T], Lno, Level) ->
+ skip_multiline_comment(Stream, T, Lno, Level).
+
+collect_quoted("'B"++T, Lno, L) ->
case check_bin(L) of
true ->
- {{bstring,Lno, lists:reverse(L)}, T};
+ {{bstring,Lno,lists:reverse(L)}, T};
false ->
- throw({'ERR',{invalid_binary_number, lists:reverse(L)}})
+ throw({error,{invalid_binary_number,lists:reverse(L)}})
end;
-collect_quoted([$',$H|T],Lno, L) ->
+collect_quoted("'H"++T, Lno, L) ->
case check_hex(L) of
true ->
- {{hstring,Lno, lists:reverse(L)}, T};
+ {{hstring,Lno,lists:reverse(L)}, T};
false ->
- throw({'ERR',{invalid_binary_number, lists:reverse(L)}})
+ throw({error,{invalid_hex_number,lists:reverse(L)}})
end;
collect_quoted([H|T], Lno, L) ->
collect_quoted(T, Lno,[H|L]);
collect_quoted([], _, _) -> % This should be allowed FIX later
- throw({'ERR',{eol_in_token}}).
+ throw({error,eol_in_token}).
check_bin([$0|T]) ->
check_bin(T);
@@ -351,7 +324,6 @@ reserved_word('INCLUDES') -> true;
reserved_word('INSTANCE') -> true;
reserved_word('INTEGER') -> true;
reserved_word('INTERSECTION') -> true;
-reserved_word('ISO646String') -> rstrtype;
reserved_word('MAX') -> true;
reserved_word('MIN') -> true;
reserved_word('MINUS-INFINITY') -> true;
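After the rewrite, asn1ct_tok:file/1 returns either the token list or {error,[{structured_error,{File,Line},asn1ct_tok,Reason}]} (or {error,{File,Reason}} if the file cannot be opened), and format_error/1 renders the reason. A rough usage sketch under those assumptions (tok_demo and tokenize_or_report/1 are illustrative names, not part of the patch):

-module(tok_demo).
-export([tokenize_or_report/1]).

%% Tokenize an ASN.1 source file and print any error in a compiler-like format.
tokenize_or_report(Asn1File) ->
    case asn1ct_tok:file(Asn1File) of
        {error,[{structured_error,{File,Line},Mod,Reason}|_]} ->
            io:format("~s:~w: ~s~n", [File,Line,Mod:format_error(Reason)]),
            error;
        {error,{File,Reason}} ->
            %% file:open/2 failed; Reason is already a formatted string.
            io:format("~s: ~s~n", [File,Reason]),
            error;
        Tokens when is_list(Tokens) ->
            {ok,Tokens}
    end.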
diff --git a/lib/asn1/test/Makefile b/lib/asn1/test/Makefile
index b1b08aa9f9..ea5a0f857e 100644
--- a/lib/asn1/test/Makefile
+++ b/lib/asn1/test/Makefile
@@ -78,6 +78,7 @@ MODULES= \
testEnumExt \
testInfObjectClass \
testInfObj \
+ testInfObjExtract \
testParameterizedInfObj \
testFragmented \
testMergeCompile \
@@ -104,14 +105,19 @@ MODULES= \
test_compile_options \
testDoubleEllipses \
test_modified_x420 \
- testX420 \
test_x691 \
testWSParamClass \
+ testValueTest \
+ testUniqueObjectSets \
+ testRfcs \
+ testImporting \
+ testExtensibilityImplied \
asn1_test_lib \
asn1_app_test \
asn1_appup_test \
asn1_SUITE \
- error_SUITE
+ error_SUITE \
+ syntax_SUITE
ERL_FILES= $(MODULES:%=%.erl)
diff --git a/lib/asn1/test/asn1_SUITE.erl b/lib/asn1/test/asn1_SUITE.erl
index 432197eec0..9dfcc3f571 100644
--- a/lib/asn1/test/asn1_SUITE.erl
+++ b/lib/asn1/test/asn1_SUITE.erl
@@ -52,9 +52,7 @@ all() ->
groups() ->
Parallel = asn1_test_lib:parallel(),
[{compile, Parallel,
- [c_syntax,
- c_string,
- c_implicit_before_choice,
+ [c_string,
constraint_equivalence]},
{ber, Parallel,
@@ -89,6 +87,7 @@ groups() ->
ber_other,
der,
h323test]},
+ testExtensibilityImplied,
testChoPrim,
testChoExtension,
testChoOptional,
@@ -135,19 +134,19 @@ groups() ->
testChoiceIndefinite,
per_open_type,
testInfObjectClass,
+ testUniqueObjectSets,
+ testInfObjExtract,
testParam,
testFragmented,
testMergeCompile,
testobj,
testDeepTConstr,
- testExport,
testImport,
testDER,
testDEFAULT,
testMvrasn6,
testContextSwitchingTypes,
testOpenTypeImplicitTag,
- duplicate_tags,
testROSE,
testINSTANCE_OF,
testTCAP,
@@ -158,16 +157,19 @@ groups() ->
testNortel,
% Uses 'PKCS7', 'InformationFramework'
{group, [], [test_WS_ParamClass,
- test_modified_x420,
- testX420]},
- testTcapsystem,
- testNBAPsystem,
- testS1AP,
+ test_modified_x420]},
+ %% Don't run all these at the same time.
+ {group, [],
+ [testTcapsystem,
+ testNBAPsystem,
+ testS1AP,
+ testRfcs]},
test_compile_options,
testDoubleEllipses,
test_x691,
ticket_6143,
- test_OTP_9688]},
+ test_OTP_9688,
+ testValueTest]},
{performance, [],
[testTimer_ber,
@@ -196,7 +198,7 @@ init_per_testcase(Func, Config) ->
true = code:add_patha(CaseDir),
Dog = case Func of
- testX420 -> ct:timetrap({minutes, 90});
+ testRfcs -> ct:timetrap({minutes, 90});
_ -> ct:timetrap({minutes, 60})
end,
[{case_dir, CaseDir}, {watchdog, Dog}|Config].
@@ -374,6 +376,12 @@ testExternal(Config, Rule, Opts) ->
testSetOfTag:main(Rule),
testSetTag:main(Rule).
+testExtensibilityImplied(Config) ->
+ test(Config, fun testExtensibilityImplied/3).
+testExtensibilityImplied(Config, Rule, Opts) ->
+ asn1_test_lib:compile("ExtensibilityImplied", Config,
+ [Rule,no_ok_wrapper|Opts]),
+ testExtensibilityImplied:main().
testChoPrim(Config) -> test(Config, fun testChoPrim/3).
testChoPrim(Config, Rule, Opts) ->
@@ -561,39 +569,21 @@ testSetOfCho(Config, Rule, Opts) ->
asn1_test_lib:compile("SetOfCho", Config, [Rule|Opts]),
testSetOfCho:main(Rule).
-c_syntax(Config) ->
- DataDir = ?config(data_dir, Config),
- [{error, _} = asn1ct:compile(filename:join(DataDir, F))
- || F <-["Syntax",
- "BadTypeEnding",
- "BadValueAssignment1",
- "BadValueAssignment2",
- "BadValueSet",
- "ChoiceBadExtension",
- "EnumerationBadExtension",
- "Example",
- "Export1",
- "MissingEnd",
- "SequenceBadComma",
- "SequenceBadComponentName",
- "SequenceBadComponentType",
- "SeqBadComma"]].
-
c_string(Config) ->
test(Config, fun c_string/3).
c_string(Config, Rule, Opts) ->
asn1_test_lib:compile("String", Config, [Rule|Opts]),
asn1ct:test('String').
-c_implicit_before_choice(Config) ->
- test(Config, fun c_implicit_before_choice/3, [ber]).
-c_implicit_before_choice(Config, Rule, Opts) ->
- DataDir = ?config(data_dir, Config),
- CaseDir = ?config(case_dir, Config),
- {error, _R2} = asn1ct:compile(filename:join(DataDir, "CCSNARG3"),
- [Rule, {outdir, CaseDir}|Opts]).
-
constraint_equivalence(Config) ->
+ constraint_equivalence_abs(Config),
+ test(Config, fun constraint_equivalence/3).
+
+constraint_equivalence(Config, Rule, Opts) ->
+ M = 'ConstraintEquivalence',
+ asn1_test_lib:compile(M, Config, [Rule|Opts]).
+
+constraint_equivalence_abs(Config) ->
DataDir = ?config(data_dir, Config),
CaseDir = ?config(case_dir, Config),
Asn1Spec = "ConstraintEquivalence",
@@ -765,6 +755,16 @@ testInfObjectClass(Config, Rule, Opts) ->
testInfObjectClass:main(Rule),
testInfObj:main(Rule).
+testUniqueObjectSets(Config) -> test(Config, fun testUniqueObjectSets/3).
+testUniqueObjectSets(Config, Rule, Opts) ->
+ CaseDir = ?config(case_dir, Config),
+ testUniqueObjectSets:main(CaseDir, Rule, Opts).
+
+testInfObjExtract(Config) -> test(Config, fun testInfObjExtract/3).
+testInfObjExtract(Config, Rule, Opts) ->
+ asn1_test_lib:compile("InfObjExtract", Config, [Rule|Opts]),
+ testInfObjExtract:main().
+
testParam(Config) ->
test(Config, fun testParam/3, [ber,{ber,[der]},per,uper]).
testParam(Config, Rule, Opts) ->
@@ -804,18 +804,14 @@ testDeepTConstr(Config, Rule, Opts) ->
[Rule|Opts]),
testDeepTConstr:main(Rule).
-testExport(Config) ->
- {error, _} =
- asn1ct:compile(filename:join(?config(data_dir, Config),
- "IllegalExport"),
- [{outdir, ?config(case_dir, Config)}]).
-
testImport(Config) ->
test(Config, fun testImport/3).
testImport(Config, Rule, Opts) ->
- Files = ["ImportsFrom","ImportsFrom2","ImportsFrom3"],
+ Files = ["ImportsFrom","ImportsFrom2","ImportsFrom3",
+ "Importing","Exporting"],
asn1_test_lib:compile_all(Files, Config, [Rule|Opts]),
42 = 'ImportsFrom':i(),
+ testImporting:main(),
ok.
testMegaco(Config) -> test(Config, fun testMegaco/3).
@@ -839,24 +835,20 @@ testContextSwitchingTypes(Config, Rule, Opts) ->
testTypeValueNotation(Config) -> test(Config, fun testTypeValueNotation/3).
testTypeValueNotation(Config, Rule, Opts) ->
- asn1_test_lib:compile_all(["SeqTypeRefPrim", "ValueTest"], Config,
- [Rule|Opts]),
+ asn1_test_lib:compile("SeqTypeRefPrim", Config, [Rule|Opts]),
testTypeValueNotation:main(Rule, Opts).
+testValueTest(Config) -> test(Config, fun testValueTest/3).
+testValueTest(Config, Rule, Opts) ->
+ asn1_test_lib:compile("ValueTest", Config, [Rule|Opts]),
+ testValueTest:main().
+
testOpenTypeImplicitTag(Config) ->
test(Config, fun testOpenTypeImplicitTag/3).
testOpenTypeImplicitTag(Config, Rule, Opts) ->
asn1_test_lib:compile("OpenTypeImplicitTag", Config, [Rule|Opts]),
testOpenTypeImplicitTag:main(Rule).
-duplicate_tags(Config) ->
- DataDir = ?config(data_dir, Config),
- CaseDir = ?config(case_dir, Config),
- {error, [{error, {type, _, _, 'SeqOpt1Imp',
- {asn1, {duplicates_of_the_tags, _}}}}]} =
- asn1ct:compile(filename:join(DataDir, "SeqOptional2"),
- [abs, {outdir, CaseDir}]).
-
rtUI(Config) -> test(Config, fun rtUI/3).
rtUI(Config, Rule, Opts) ->
asn1_test_lib:compile("Prim", Config, [Rule|Opts]),
@@ -990,13 +982,22 @@ testS1AP(Config, Rule, Opts) ->
ok
end.
+testRfcs(Config) -> test(Config, fun testRfcs/3, [{ber,[der]}]).
+testRfcs(Config, Rule, Opts) ->
+ case erlang:system_info(system_architecture) of
+ "sparc-sun-solaris2.10" ->
+ {skip,"Too slow for an old Sparc"};
+ _ ->
+ testRfcs:compile(Config, Rule, Opts),
+ testRfcs:test()
+ end.
+
test_compile_options(Config) ->
ok = test_compile_options:wrong_path(Config),
ok = test_compile_options:path(Config),
ok = test_compile_options:noobj(Config),
ok = test_compile_options:record_name_prefix(Config),
- ok = test_compile_options:verbose(Config),
- ok = test_compile_options:warnings_as_errors(Config).
+ ok = test_compile_options:verbose(Config).
testDoubleEllipses(Config) -> test(Config, fun testDoubleEllipses/3).
testDoubleEllipses(Config, Rule, Opts) ->
@@ -1084,6 +1085,7 @@ test_modules() ->
"CommonDataTypes",
"Constraints",
"ContextSwitchingTypes",
+ "CoverParser",
"DS-EquipmentUser-CommonFunctionOrig-TransmissionPath",
"Enum",
"From",
@@ -1118,7 +1120,9 @@ test_modules() ->
"Def",
"Opt",
"ELDAPv3",
- "LDAP"].
+ "LDAP",
+ "SeqOptional2",
+ "CCSNARG3"].
test_OTP_9688(Config) ->
PrivDir = ?config(case_dir, Config),
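The suite wires each test case as a pair of functions: an arity-1 entry that delegates to test/2, and an arity-3 body that compiles the spec and runs the generated module, as seen for testValueTest and testInfObjExtract above. A hypothetical new case (MyNewSpec/testMyNewSpec are placeholder names, not part of this patch) would follow the same shape:

testMyNewSpec(Config) -> test(Config, fun testMyNewSpec/3).
testMyNewSpec(Config, Rule, Opts) ->
    asn1_test_lib:compile("MyNewSpec", Config, [Rule|Opts]),
    testMyNewSpec:main(Rule).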
diff --git a/lib/asn1/test/asn1_SUITE_data/BadTypeEnding.asn b/lib/asn1/test/asn1_SUITE_data/BadTypeEnding.asn
deleted file mode 100644
index 3ccd838ac0..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/BadTypeEnding.asn
+++ /dev/null
@@ -1,6 +0,0 @@
-BadTypeEnding DEFINITIONS ::=
-BEGIN
-
-T ::= Typ;
-
-END
diff --git a/lib/asn1/test/asn1_SUITE_data/BadValueAssignment1.asn1 b/lib/asn1/test/asn1_SUITE_data/BadValueAssignment1.asn1
deleted file mode 100644
index a5d4984e60..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/BadValueAssignment1.asn1
+++ /dev/null
@@ -1,8 +0,0 @@
-BadValueAssignment1 DEFINITIONS ::=
-BEGIN
-
-int INTEGER ::= 3
-
-int2 integer ::= 3
-
-END
diff --git a/lib/asn1/test/asn1_SUITE_data/BadValueAssignment2.asn1 b/lib/asn1/test/asn1_SUITE_data/BadValueAssignment2.asn1
deleted file mode 100644
index 7a96406001..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/BadValueAssignment2.asn1
+++ /dev/null
@@ -1,8 +0,0 @@
-BadValueAssignment2 DEFINITIONS ::=
-BEGIN
-
-int INTEGER ::= 3
-
-int2 ::= 3
-
-END
diff --git a/lib/asn1/test/asn1_SUITE_data/BadValueSet.asn1 b/lib/asn1/test/asn1_SUITE_data/BadValueSet.asn1
deleted file mode 100644
index 68bd4380b7..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/BadValueSet.asn1
+++ /dev/null
@@ -1,9 +0,0 @@
-BadValueSet DEFINITIONS ::=
-BEGIN
-
-Int INTEGER ::= {1|2|3}
-
-Int2 INTEGER ::= {
- 1,2,3}
-
-END
diff --git a/lib/asn1/test/asn1_SUITE_data/CCSNARG3.asn b/lib/asn1/test/asn1_SUITE_data/CCSNARG3.asn
index 23c1f32ceb..8932238adc 100644
--- a/lib/asn1/test/asn1_SUITE_data/CCSNARG3.asn
+++ b/lib/asn1/test/asn1_SUITE_data/CCSNARG3.asn
@@ -3,7 +3,7 @@ BEGIN
CallCentreServiceNotificationArg ::= SEQUENCE {
scriptInformation [0] ScriptToScriptInformation,
- eventInformation [1] IMPLICIT EventInformation OPTIONAL
+ eventInformation [1] EventInformation OPTIONAL
}
diff --git a/lib/asn1/test/asn1_SUITE_data/ChoExtension.asn1 b/lib/asn1/test/asn1_SUITE_data/ChoExtension.asn1
index f6fe18be10..18473bae30 100644
--- a/lib/asn1/test/asn1_SUITE_data/ChoExtension.asn1
+++ b/lib/asn1/test/asn1_SUITE_data/ChoExtension.asn1
@@ -41,10 +41,4 @@ ChoExt4 ::= CHOICE
str OCTET STRING
}
-ChoEmptyRoot ::= CHOICE {
- ...,
- bool BOOLEAN,
- int INTEGER (0..7)
-}
-
END
diff --git a/lib/asn1/test/asn1_SUITE_data/ChoiceBadExtension.asn1 b/lib/asn1/test/asn1_SUITE_data/ChoiceBadExtension.asn1
deleted file mode 100644
index d0789d7414..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/ChoiceBadExtension.asn1
+++ /dev/null
@@ -1,27 +0,0 @@
-ChoiceBadExtension DEFINITIONS ::=
-BEGIN
-
-Seq ::= SEQUENCE {
- ...,
- name PrintableString,
- location INTEGER {home(0),field(1),roving(2)},
- age INTEGER
- }
-
-Cho1 ::= CHOICE {
- name PrintableString,
- ...,
- location INTEGER {home(0),field(1),roving(2)},
- age INTEGER
- }
-
-Cho2 ::= CHOICE {
- ...,
- name PrintableString,
- location INTEGER {home(0),field(1),roving(2)},
- age INTEGER
- }
-
-END
-
-
diff --git a/lib/asn1/test/asn1_SUITE_data/ConstraintEquivalence.asn1 b/lib/asn1/test/asn1_SUITE_data/ConstraintEquivalence.asn1
index 8b3d151502..648275dd66 100644
--- a/lib/asn1/test/asn1_SUITE_data/ConstraintEquivalence.asn1
+++ b/lib/asn1/test/asn1_SUITE_data/ConstraintEquivalence.asn1
@@ -11,6 +11,10 @@ BEGIN
SingleValueX8 ::= INTEGER (integer42)
SingleValueX9 ::= INTEGER (integer42..integer42)
SingleValueX10 ::= INTEGER ((integer42) INTERSECTION (40..49))
+ SingleValueX11 ::= INTEGER (40..49) (integer42)
+ SingleValueX12 ::= INTEGER ((MIN..0) ^ (1..10) | integer42)
+ SingleValueX13 ::= INTEGER ((11..20) ^ (1..10) | integer42)
+ SingleValueX14 ::= INTEGER ((MIN..42) ^ (1..100) ^ (42..50))
UnconstrainedX0 ::= INTEGER
UnconstrainedX1 ::= INTEGER (MIN..MAX)
@@ -19,6 +23,10 @@ BEGIN
UnconstrainedX4 ::= INTEGER ((MIN..MAX)|9|10)
UnconstrainedX5 ::= INTEGER ((MIN..MAX)|10..20)
UnconstrainedX6 ::= INTEGER ((MIN..MAX) UNION (10..20))
+ UnconstrainedX7 ::= INTEGER ((MIN..MAX) ^ ((MIN..MAX) UNION (10..20)))
+ UnconstrainedX8 ::= INTEGER ((-100..MAX) ^ (42..MAX) | (MIN..41))
+ UnconstrainedX9 ::= INTEGER (UnconstrainedX0)
+ UnconstrainedX10 ::= INTEGER (UnconstrainedX0)(MIN..MAX)
RangeX00 ::= INTEGER (5..10)
RangeX01 ::= INTEGER (4<..<11)
@@ -38,22 +46,66 @@ BEGIN
RangeX16 ::= INTEGER ((5|6) UNION (7) UNION (7<..<11))
RangeX20 ::= INTEGER (0..20) (5..10)
- RangeX21 ::= INTEGER (0..10) (5..20)
- RangeX22 ::= INTEGER (0..10) (5..20) (MIN..MAX)
- RangeX23 ::= INTEGER ((0..10) INTERSECTION (5..20) ^ (MIN..MAX))
- RangeX24 ::= INTEGER ((5|6|7|8|9|10) INTERSECTION (5..20) ^ (MIN..MAX))
+ RangeX21 ::= INTEGER ((0..10) ^ (5..20))
+ RangeX22 ::= INTEGER ((0..10) ^ (5..20) ^ (MIN..MAX))
+ RangeX23 ::= INTEGER (MIN..MAX) (-100..20) (5..10)
+ RangeX24 ::= INTEGER (MIN..MAX) (0..100) (5..20) (5..10)
+ RangeX25 ::= INTEGER ((0..10) INTERSECTION (5..20) ^ (MIN..MAX))
+ RangeX26 ::= INTEGER ((5|6|7|8|9|10) INTERSECTION (5..20) ^ (MIN..MAX))
+
+ RangeX30 ::= INTEGER (((5|6) | (5..20)) ^ (0..10))
+ RangeX31 ::= INTEGER (((((5|6) | (5..20)) ^ (0..10))) ^ (MIN..MAX))
+ RangeX32 ::= INTEGER ((5|7) | (5..10))
+
+ Semi00 ::= INTEGER (0..MAX)
+ Semi01 ::= INTEGER (0..MAX) (MIN..MAX)
+ Semi02 ::= INTEGER ((0..100) UNION (200..MAX) UNION (50..1024))
+
+ RangeExtX00 ::= INTEGER (5..10, ...)
+ RangeExtX01 ::= INTEGER (0..20) (5..10, ...)
+ RangeExtX02 ::= INTEGER (RangeX26) (5..10, ...)
+-- RangeExtX03 ::= RangeX26 (5..10, ...)
+
+ MinRangeX00 ::= INTEGER (MIN..10)
+ MinRangeX01 ::= INTEGER ((MIN..0) | (0..10))
+ MinRangeX02 ::= INTEGER (MIN..MAX) (MIN..100) (MIN..10)
+ MinRangeX03 ::= INTEGER (((MIN..-100)|(-60..-50)) | (MIN..10))
+
+ DisjointRangeX00 ::= INTEGER (0..5 UNION 95..99)
+ DisjointRangeX01 ::= INTEGER (0|1|2|3|4|5|95|96|97|98|99)
+ DisjointRangeX02 ::= INTEGER (0..100) (0..2 UNION 95..99 UNION 3|4|5)
+ DisjointRangeX03 ::= INTEGER (MIN..MAX) (0..2 UNION 95..99 UNION 3|4|5)
+
+ MinDisjointRangeX00 ::= INTEGER (MIN..-100 UNION 100..1000)
+ MinDisjointRangeX01 ::= INTEGER (MIN..-100 UNION 100..1000 UNION (MIN..-100))
+ MinDisjointRangeX02 ::= INTEGER (MIN..-50000 UNION 100..1000 UNION (MIN..-100))
+ MinDisjointRangeX03 ::= INTEGER (MIN..-100 UNION 100..1000 UNION (MIN..-1000000))
+ MinDisjointRangeX04 ::= INTEGER (MIN..-100 UNION 100..1000 UNION (MIN..-1000000))
+ MinDisjointRangeX05 ::= INTEGER (MIN..-100 ^ (MIN..-100) UNION 100..1000)
+ MinDisjointRangeX06 ::= INTEGER (MIN..-100 ^ (MIN..0) UNION 100..1000)
UnconstrainedStringX00 ::= IA5String
UnconstrainedStringX01 ::= IA5String (SIZE (0..MAX))
+ UnconstrainedStringX02 ::= IA5String (SIZE (0..42|43..MAX))
ConstrainedStringX00 ::= IA5String (SIZE (0..5))
ConstrainedStringX01 ::= IA5String (SIZE (0|1|2|3|4|5))
+ StringExtFromX00 ::= IA5String (FROM ("AB", ..., "CD"))(SIZE (1..10, ..., 15..20))
+ StringExtFromX01 ::= IA5String (FROM ("AB", ..., "CD"))(SIZE (1..10, ..., 15..20))
+ StringExtFromX02 ::= IA5String ((FROM ("AB", ..., "CD")) ^ ((SIZE (1..10, ..., 15..20))))
+ StringExtFromX03 ::= IA5String ((FROM ("AB", ..., "CD")) ^ (SIZE (1..10, ..., 15..20)))
+ StringExtFromX04 ::= IA5String (StringExtFromX00)
+
-- Note: None of the back-ends care about the exact values
-- outside of the root range.
ExtConstrainedStringX00 ::= IA5String (SIZE (1..2, ...))
ExtConstrainedStringX01 ::= IA5String (SIZE (1|2, ..., 3))
ExtConstrainedStringX02 ::= IA5String (SIZE (1|2, ..., 3|4|5))
+ ExtConstrainedStringX03 ::= IA5String (SIZE (1|2, ..., 1|2|3|4|5))
+ ExtConstrainedStringX04 ::= IA5String (SIZE (1|2), ..., SIZE (1|2|3|4|5))
+ ExtConstrainedStringX05 ::= IA5String (SIZE (1|2, ...), ...,
+ SIZE (1|2|3|4|5, ...))
integer4 INTEGER ::= 4
integer11 INTEGER ::= 11
diff --git a/lib/asn1/test/asn1_SUITE_data/Constraints.py b/lib/asn1/test/asn1_SUITE_data/Constraints.py
index 3495cd841b..a40c513141 100644
--- a/lib/asn1/test/asn1_SUITE_data/Constraints.py
+++ b/lib/asn1/test/asn1_SUITE_data/Constraints.py
@@ -81,7 +81,7 @@ maxNrOfCellPortionsPerCell-1 INTEGER ::= 35
CellPortionID ::= INTEGER (0..maxNrOfCellPortionsPerCell-1,...)
-- OTP-6763
-T ::= IA5String (SIZE (1|2, ..., SIZE (1|2|3))) -- Dubuisson 268
+T ::= IA5String (SIZE (1|2), ..., SIZE (1|2|3)) -- Dubuisson 268
T2 ::= IA5String (SIZE (1|2, ..., 3)) -- equal with T
-- OTP-8046
@@ -144,5 +144,47 @@ NonOverlapping ::= INTEGER (7280..7560 |
23000..24000 |
24960..26900)
+--
+-- Test INTEGER constraints from fields in objects.
+--
+
+INT-HOLDER ::= CLASS {
+ &id INTEGER UNIQUE,
+ &obj INT-HOLDER OPTIONAL
+} WITH SYNTAX {
+ ID &id
+ [OBJ &obj]
+}
+
+int-holder-1 INT-HOLDER ::= { ID 2 }
+int-holder-2 INT-HOLDER ::= { ID 4 OBJ int-holder-1 }
+
+IntObjectConstr ::= INTEGER (int-holder-2.&obj.&id..int-holder-2.&id)
+
+--
+-- INTEGER constraints defined using named INTEGERs.
+--
+
+ConstrainedNamedInt ::= INTEGER {v1(42)} (v1)
+constrainedNamedInt-1 INTEGER {v1(42)} (v1) ::= 42
+constrainedNamedInt-2 ConstrainedNamedInt ::= 100
+
+SeqWithNamedInt ::= SEQUENCE {
+ int INTEGER {v2(7)} (v2)
+}
+
+--
+-- Cover simpletable constraint checking code.
+--
+
+ContentInfo ::= SEQUENCE {
+ contentType ContentType
+}
+
+Contents TYPE-IDENTIFIER ::= {
+ {OCTET STRING IDENTIFIED BY {2 1 1 1 1 1 1}}
+}
+
+ContentType ::= TYPE-IDENTIFIER.&id({Contents})
END
diff --git a/lib/asn1/test/asn1_SUITE_data/CoverParser.asn1 b/lib/asn1/test/asn1_SUITE_data/CoverParser.asn1
new file mode 100644
index 0000000000..75d40188ca
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/CoverParser.asn1
@@ -0,0 +1,57 @@
+CoverParser DEFINITIONS AUTOMATIC TAGS ::=
+BEGIN
+
+ Cho1 ::= CHOICE {
+ i INTEGER,
+ ... ! 42,
+ [[ b BOOLEAN ]]
+ }
+
+ Cho2 ::= CHOICE {
+ i INTEGER,
+ ...,
+ [[ b BOOLEAN,
+ s IA5String ]],
+ ...
+ }
+
+ Int1 ::= INTEGER (CONSTRAINED BY {INTEGER:1,INTEGER:2})
+
+ Seq1 ::= SEQUENCE {
+ ... ! INTEGER:1
+ }
+
+ Seq2 ::= SEQUENCE {
+ ... ! INTEGER:1,
+ i INTEGER
+ }
+
+ Seq3 ::= SEQUENCE {
+ b BOOLEAN,
+ ... ! INTEGER:1,
+ i INTEGER
+ }
+
+ Seq4 ::= SEQUENCE {
+ a INTEGER OPTIONAL,
+ b OCTET STRING OPTIONAL
+ } (WITH COMPONENTS {a ABSENT, b OPTIONAL} |
+ WITH COMPONENTS {a PRESENT, b PRESENT})
+
+ SeqOf1 ::= SEQUENCE OF INTEGER
+ SeqOf2 ::= SeqOf1 (WITH COMPONENT (0..7))
+
+ SegOf3 ::= SEQUENCE (SIZE (1..10)) OF id INTEGER
+
+ Set1 ::= SET {
+ ... ! INTEGER:1
+ }
+
+ Set2 ::= SET {
+ ... ! INTEGER:1,
+ a INTEGER
+ }
+
+ SetOf3 ::= SET (SIZE (1..10)) OF id INTEGER
+
+END
diff --git a/lib/asn1/test/asn1_SUITE_data/DoubleEllipses.asn b/lib/asn1/test/asn1_SUITE_data/DoubleEllipses.asn
index e90cf55d61..a96425cbea 100644
--- a/lib/asn1/test/asn1_SUITE_data/DoubleEllipses.asn
+++ b/lib/asn1/test/asn1_SUITE_data/DoubleEllipses.asn
@@ -12,6 +12,15 @@ Seq ::= SEQUENCE
c BOOLEAN
}
+SeqV1 ::= SEQUENCE
+ {
+ a INTEGER,
+ ...,
+ b BOOLEAN,
+ ...
+ }
+
+
SeqV2 ::= SEQUENCE
{
a INTEGER,
@@ -57,6 +66,14 @@ Set ::= SET {
c BOOLEAN
}
+
+SetV1 ::= SET {
+ a INTEGER,
+ ...,
+ b BOOLEAN,
+ ...
+ }
+
SetV2 ::= SET
{
a INTEGER,
@@ -96,4 +113,4 @@ SetAltV2 ::= SET
}
-END
\ No newline at end of file
+END
diff --git a/lib/asn1/test/asn1_SUITE_data/EnumExt.asn1 b/lib/asn1/test/asn1_SUITE_data/EnumExt.asn1
index 74fa97e7aa..55ad5a01a1 100644
--- a/lib/asn1/test/asn1_SUITE_data/EnumExt.asn1
+++ b/lib/asn1/test/asn1_SUITE_data/EnumExt.asn1
@@ -53,5 +53,7 @@ SeqBig ::= SEQUENCE {
i INTEGER
}
+EnumSkip ::= ENUMERATED {a(2), ..., b, c, d, e, f}
+
END
diff --git a/lib/asn1/test/asn1_SUITE_data/Example.asn1 b/lib/asn1/test/asn1_SUITE_data/Example.asn1
deleted file mode 100644
index 2639f63940..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/Example.asn1
+++ /dev/null
@@ -1,20 +0,0 @@
-Example DEFINITIONS ::=
-BEGIN
-
-T ::= Typ
-
-Typ ::= SEQUENCE {
- a b,
- c Typ}
---ECLASS ::= CLASS {
--- &num INTEGER UNIQUE,
--- &Typo
--- } WITH SYNTAX {
--- &Typo DETERMINED BY &num
--- }
-
---v1 ECLASS ::= {INTEGER DETERMINED BY 12}
-
---v2 INTEGER ::= 13
-
-END
diff --git a/lib/asn1/test/asn1_SUITE_data/Export1.asn b/lib/asn1/test/asn1_SUITE_data/Export1.asn
deleted file mode 100644
index 78ead8f4d2..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/Export1.asn
+++ /dev/null
@@ -1,7 +0,0 @@
-Export1 DEFINITIONS ::=
-BEGIN
-EXPORTS T
-
-T ::= Typ
-
-END
diff --git a/lib/asn1/test/asn1_SUITE_data/Exporting.asn1 b/lib/asn1/test/asn1_SUITE_data/Exporting.asn1
new file mode 100644
index 0000000000..e4f32f6788
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/Exporting.asn1
@@ -0,0 +1,18 @@
+Exporting DEFINITIONS AUTOMATIC TAGS ::=
+BEGIN
+
+ Seq ::= SEQUENCE { id INTEGER, f BOOLEAN }
+ PtSeq{T} ::= SEQUENCE { a T }
+
+ CL ::= CLASS {
+ &id INTEGER UNIQUE,
+ &Type
+ } WITH SYNTAX {
+ ID &id TYPE &Type
+ }
+
+ obj CL ::= { ID 1 TYPE OCTET STRING }
+
+ pt-object{CL:ob} CL ::= {ID ob.&id TYPE OCTET STRING}
+
+END
diff --git a/lib/asn1/test/asn1_SUITE_data/ExtensibilityImplied.asn1 b/lib/asn1/test/asn1_SUITE_data/ExtensibilityImplied.asn1
new file mode 100644
index 0000000000..d59b0edda5
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/ExtensibilityImplied.asn1
@@ -0,0 +1,30 @@
+ExtensibilityImplied DEFINITIONS
+AUTOMATIC TAGS
+EXTENSIBILITY IMPLIED
+::=
+BEGIN
+
+Enum1 ::= ENUMERATED { root, ..., ext }
+Enum2 ::= ENUMERATED { root }
+
+Seq1 ::= SEQUENCE {
+ b BOOLEAN,
+ ...,
+ i INTEGER
+}
+
+Seq2 ::= SEQUENCE {
+ b BOOLEAN
+}
+
+Set1 ::= SET {
+ b BOOLEAN,
+ ...,
+ i INTEGER
+}
+
+Set2 ::= SET {
+ b BOOLEAN
+}
+
+END
diff --git a/lib/asn1/test/asn1_SUITE_data/IllegalExport.asn1 b/lib/asn1/test/asn1_SUITE_data/IllegalExport.asn1
deleted file mode 100644
index 1b5e42ad3c..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/IllegalExport.asn1
+++ /dev/null
@@ -1,7 +0,0 @@
-IllegalExport DEFINITIONS ::=
-BEGIN
-EXPORTS T, KalleAnka;
-
-T ::= INTEGER
-
-END
diff --git a/lib/asn1/test/asn1_SUITE_data/Importing.asn1 b/lib/asn1/test/asn1_SUITE_data/Importing.asn1
new file mode 100644
index 0000000000..2f2699c576
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/Importing.asn1
@@ -0,0 +1,20 @@
+Importing DEFINITIONS AUTOMATIC TAGS ::=
+BEGIN
+
+ Seq ::= Exporting.PtSeq{ INTEGER(0..7) }
+ OtherSeq ::= Exporting.Seq
+
+ seq Exporting.Seq ::= { id 42, f TRUE }
+
+ o1 Exporting.CL ::= { ID 2 TYPE INTEGER (0..63) }
+
+ ObjSet Exporting.CL ::= { o1 | Exporting.obj }
+
+ ObjSeq ::= SEQUENCE {
+ id Exporting.CL.&id ({ObjSet}),
+ type Exporting.CL.&Type ({ObjSet}{@id})
+ }
+
+ o1-cloned Exporting.CL ::= Exporting.pt-object{o1}
+
+END
diff --git a/lib/asn1/test/asn1_SUITE_data/InfObj.asn b/lib/asn1/test/asn1_SUITE_data/InfObj.asn
index 719119f418..3b88770d78 100644
--- a/lib/asn1/test/asn1_SUITE_data/InfObj.asn
+++ b/lib/asn1/test/asn1_SUITE_data/InfObj.asn
@@ -206,7 +206,9 @@ ConstructedDefaultSet CONSTRUCTED-DEFAULT ::= {
{ &id 4, &Type SET { a INTEGER, b BIT STRING } } |
{ &id 5, &Type CHOICE { i INTEGER, b BIT STRING } } |
{ &id 6, &Type SEQUENCE OF INTEGER (1..16) } |
- { &id 7, &Type SET OF INTEGER (1..64) }
+ { &id 7, &Type SET OF INTEGER (1..64) } |
+ { &id 8, &Type SEQUENCE OF SEQUENCE { x INTEGER, y INTEGER } } |
+ { &id 9, &Type SET OF SEQUENCE { x INTEGER, y INTEGER } }
}
ConstructedPdu ::= SEQUENCE {
@@ -288,18 +290,196 @@ OstSeq1234 ::= ObjectSetTest{ {Ost1234} }
OstSeq45 ::= ObjectSetTest{ {Ost45} }
OstSeq12345 ::= ObjectSetTest{ {Ost12345} }
+OstSeq12Except ::= ObjectSetTest{ {Ost123 EXCEPT ost3} }
+OstSeq123Except ::= ObjectSetTest{ {Ost12345 EXCEPT Ost45} }
+
+ExOst1 OBJECT-SET-TEST ::= { ost1, ... }
ExOst12 OBJECT-SET-TEST ::= { ost1, ..., ost2 }
ExOst123 OBJECT-SET-TEST ::= { ost3, ..., ExOst12 }
---ExOst1234 OBJECT-SET-TEST ::= { ExOst123, ..., ost4 }
+ExOst1234 OBJECT-SET-TEST ::= { ExOst123, ..., ost4 }
ExOst45 OBJECT-SET-TEST ::= { ost4, ..., ost5 }
ExOst12345 OBJECT-SET-TEST ::= { ExOst123, ..., ExOst45 }
+ExOstSeq1 ::= ObjectSetTest{ {ExOst1} }
ExOstSeq12 ::= ObjectSetTest{ {ExOst12} }
ExOstSeq123 ::= ObjectSetTest{ {ExOst123} }
---ExOstSeq1234 ::= ObjectSetTest{ {ExOst1234} }
+ExOstSeq1234 ::= ObjectSetTest{ {ExOst1234} }
ExOstSeq45 ::= ObjectSetTest{ {ExOst45} }
ExOstSeq12345 ::= ObjectSetTest{ {ExOst12345} }
-END
+ExOstSeq12Except ::= ObjectSetTest{ {ExOst123 EXCEPT ost3} }
+ExOstSeq123Except ::= ObjectSetTest{ {ExOst12345 EXCEPT ExOst45} }
+
+ExInlOst1 OBJECT-SET-TEST ::= {
+ { 1 IS BIT STRING },
+ ...
+}
+ExInlOst12 OBJECT-SET-TEST ::= {
+ { 1 IS BIT STRING },
+ ...,
+ { 2 IS OCTET STRING }
+}
+
+ExInlOstSeq1 ::= ObjectSetTest{ {ExInlOst1} }
+ExInlOstSeq12 ::= ObjectSetTest{ {ExInlOst12} }
+
+--
+-- Test that extensions in a simple class works.
+--
+
+ExtClassSeq ::= SEQUENCE {
+ arg EXT-CLASS.&id({Extend})
+}
+
+EXT-CLASS ::= CLASS {
+ &id INTEGER UNIQUE
+} WITH SYNTAX {
+ ID &id
+}
+
+Extend EXT-CLASS ::= { { ID alt1 } | { ID alt2 }, ... }
+
+alt1 INTEGER ::= 4
+alt2 INTEGER ::= 5
+
+
+--
+-- Test a BIT STRING which is optional in the simplified syntax.
+--
+
+PUBLIC-KEY ::= CLASS {
+ &id INTEGER UNIQUE,
+ &keyUsage KeyUsage OPTIONAL
+} WITH SYNTAX {
+ IDENTIFIER &id
+ [OPTIONAL-BIT-STRING &keyUsage]
+}
+
+KeyUsage ::= BIT STRING {
+ digitalSignature (0),
+ nonRepudiation (1),
+ keyEncipherment (2)
+ }
+
+object-with-optional-bit-string PUBLIC-KEY ::= {
+ IDENTIFIER 42
+ OPTIONAL-BIT-STRING {digitalSignature, nonRepudiation, keyEncipherment}
+}
+
+-- Test object identifiers from objects.
+
+CONTAINER ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &rid RELATIVE-OID OPTIONAL,
+ &Type OPTIONAL
+} WITH SYNTAX {
+ IDENTIFIED BY &id
+ [REL-OID &rid]
+ [TYPE &Type]
+}
+
+id1 OBJECT IDENTIFIER ::= {1 2 42}
+obj1 CONTAINER ::= { IDENTIFIED BY id1 REL-OID {100 101} }
+
+value-2 OBJECT IDENTIFIER ::= { value-1 25 }
+value-1 OBJECT IDENTIFIER ::= obj1.&id
+value-3 RELATIVE-OID ::= obj1.&rid
+value-4 OBJECT IDENTIFIER ::= { 1 2 value-3 }
+
+
+-- Test an obscure issue when ATTRIBUTE.&id was not
+-- properly evaluated.
+
+Rdn ::= SingleAttribute { {SupportedAttributes} }
+
+ATTRIBUTE ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &Type OPTIONAL
+}
+
+SingleAttribute{ATTRIBUTE:AttrSet} ::= SEQUENCE {
+ type ATTRIBUTE.&id({AttrSet}),
+ value ATTRIBUTE.&Type({AttrSet}{@type})
+}
+AttributeType ::= ATTRIBUTE.&id
+SupportedAttributes ATTRIBUTE ::= { at-name }
+
+id-at OBJECT IDENTIFIER ::= { 2 5 4 41 }
+id-at-name AttributeType ::= id-at
+at-name ATTRIBUTE ::= { &Type PrintableString, &id id-at-name }
+
+--
+-- Test using an alias for TYPE-IDENTIFIER.
+--
+
+TiAliasParameterized { TI-ALIAS:InfoObjectSet } ::= SEQUENCE {
+ algorithm TI-ALIAS.&id({InfoObjectSet}),
+ parameters TI-ALIAS.&Type({InfoObjectSet} {@algorithm}) OPTIONAL
+}
+
+TI-ALIAS ::= TYPE-IDENTIFIER
+
+TiAliasSeq ::= SEQUENCE {
+ prf TiAliasParameterized {{TiAliasSet}}
+}
+
+TiAliasSet TI-ALIAS ::= {
+ {NULL IDENTIFIED BY {2 1 2}},
+ ...
+}
+
+--
+-- Test using an alias for a class.
+--
+
+ALIAS-CONTAINER ::= CLASS {
+ &id INTEGER UNIQUE,
+ &obj INDIRECT-CLASS
+}
+
+INDIRECTED-CLASS ::= CLASS {
+ &id INTEGER UNIQUE,
+ &Type
+}
+
+INDIRECT-CLASS ::= INDIRECTED-CLASS
+
+--
+-- Indirect ObjectClassFieldType in a SEQUENCE.
+--
+
+ContentInfo ::= SEQUENCE {
+ contentType ContentType, -- Indirect ObjectClassFieldType
+ content TYPE-IDENTIFIER.&Type({Contents}{@contentType})
+OPTIONAL
+}
+
+Contents TYPE-IDENTIFIER ::= {
+ {IA5String IDENTIFIED BY id-content-type}
+}
+
+ContentType ::= TYPE-IDENTIFIER.&id({Contents})
+id-content-type ContentType ::= { 2 7 8 9 }
+
+--
+-- Tricky parsing of simplified syntax.
+--
+
+TrickyType-1 ::= BIT STRING
+TrickyType-2 ::= OCTET STRING
+
+TRICKY ::= CLASS {
+ &Type1,
+ &Type2
+} WITH SYNTAX {
+ TYPE &Type1 &Type2
+}
+
+tricky-object TRICKY ::= {TYPE TrickyType-1 TrickyType-2}
+
+tricky-bit-string tricky-object.&Type1 ::= '1011'B
+tricky-octet-string tricky-object.&Type1 ::= 'CAFE'H
+
+END
diff --git a/lib/asn1/test/asn1_SUITE_data/InfObjExtract.asn1 b/lib/asn1/test/asn1_SUITE_data/InfObjExtract.asn1
new file mode 100644
index 0000000000..13981b546d
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/InfObjExtract.asn1
@@ -0,0 +1,136 @@
+InfObjExtract DEFINITIONS AUTOMATIC TAGS ::=
+BEGIN
+
+DATA-CLASS ::= CLASS {
+ &id INTEGER UNIQUE,
+ &Type
+} WITH SYNTAX {
+ ID &id
+ TYPE &Type
+}
+
+data-object-1 DATA-CLASS ::= { ID 1 TYPE BOOLEAN }
+data-object-2 DATA-CLASS ::= { ID 2 TYPE OCTET STRING }
+data-object-3 DATA-CLASS ::= { ID 3 TYPE BIT STRING }
+
+ObjSet DATA-CLASS ::= {
+ holder-object-1.&obj |
+ data-object-2 |
+ data-object-3,
+ ...
+}
+
+OBJ-SET DATA-CLASS ::= {
+ holder-object-1.&obj |
+ data-object-2 |
+ data-object-3,
+ ...
+}
+
+SingleElementSet DATA-CLASS ::= {
+ holder-object-1.&obj
+}
+
+holder-object-1 HOLDER-CLASS ::= {
+ OBJ data-object-1
+}
+
+holder-object-2 HOLDER-CLASS ::= {
+ OBJ-SET {data-object-1}
+}
+
+holder-object-3 HOLDER-CLASS ::= {
+ OBJ-SET {holder-object-2.&ObjSet}
+}
+
+-- Note: References to object sets with names in all uppercase/hyphens
+-- may be represented differently compared to object sets with names
+-- that contain lowercase letters. CAVEAT TESTOR.
+
+HOLDER-OBJECTS HOLDER-CLASS ::= { holder-object-2 }
+HolderObjects HOLDER-CLASS ::= { holder-object-3 }
+
+holder-object-4 HOLDER-CLASS ::= {
+ OBJ-SET { HOLDER-OBJECTS.&ObjSet }
+}
+
+holder-object-5 HOLDER-CLASS ::= {
+ OBJ-SET { HolderObjects.&ObjSet }
+}
+
+holder-object-6 HOLDER-CLASS ::= {
+ OBJ-SET { OBJ-SET }
+}
+
+holder-object-7 HOLDER-CLASS ::= {
+ OBJ-SET { ObjSet }
+}
+
+HOLDER-CLASS ::= CLASS {
+ &obj DATA-CLASS OPTIONAL,
+ &ObjSet DATA-CLASS OPTIONAL
+} WITH SYNTAX {
+ [OBJ &obj]
+ [OBJ-SET &ObjSet]
+}
+
+TestSeq{DATA-CLASS:ObjectSet} ::= SEQUENCE {
+ id DATA-CLASS.&id ({ObjectSet}),
+ data DATA-CLASS.&Type ({ObjectSet}{@id})
+}
+
+DataSeq-1 ::= TestSeq{ {ObjSet} }
+DataSeq-2 ::= TestSeq{ {holder-object-3.&ObjSet} }
+
+DataSeq-3 ::= TestSeq{ {holder-object-4.&ObjSet} }
+DataSeq-4 ::= TestSeq{ {holder-object-5.&ObjSet} }
+DataSeq-5 ::= TestSeq{ {holder-object-6.&ObjSet} }
+DataSeq-6 ::= TestSeq{ {holder-object-7.&ObjSet} }
+
+DataSeqSingleSet-1 ::= TestSeq{ {SingleElementSet} }
+DataSeqSingleSet-2 ::= TestSeq{ {holder-object-1.&obj} }
+
+--
+-- Test ObjectSetFromObjects.
+--
+
+OBJ-CLASS ::= CLASS {
+ &id INTEGER UNIQUE,
+ &Data OPTIONAL,
+ &Obj OBJ-CLASS OPTIONAL,
+ &obj OBJ-CLASS OPTIONAL
+}
+
+obj-class-obj-1 OBJ-CLASS ::= { &id 1, &Data BOOLEAN }
+
+obj-class-obj-2 OBJ-CLASS ::= { &id 2, &Data BOOLEAN,
+ &Obj {obj-class-obj-1} }
+
+obj-class-obj-3 OBJ-CLASS ::= { &id 3, &Data BOOLEAN,
+ &obj {&id 99, &Obj {obj-class-obj-1}} }
+
+obj-class-obj-4 OBJ-CLASS ::= { &id 4, &Data BOOLEAN, &obj obj-class-obj-2 }
+
+obj-class-obj-5 OBJ-CLASS ::= { &id 5, &Data BOOLEAN,
+ &Obj {obj-class-obj-4.&obj} }
+
+ObjClassSet OBJ-CLASS ::= { obj-class-obj-3.&obj.&Obj |
+ obj-class-obj-4.&Obj | -- Non-existing field
+ obj-class-obj-5.&Obj
+ }
+
+TestObjClassSeq{OBJ-CLASS:ObjectSet} ::= SEQUENCE {
+ id OBJ-CLASS.&id ({ObjectSet}),
+ data OBJ-CLASS.&Data ({ObjectSet}{@id})
+}
+
+ObjClassSeq-1 ::= TestObjClassSeq{{ObjClassSet}}
+
+--
+-- Test several levels of inlined definitions.
+--
+
+obj-class-obj-6 OBJ-CLASS ::= { &id 6, &Obj {{&id 100, &Data INTEGER}},
+ &Data INTEGER }
+
+END
diff --git a/lib/asn1/test/asn1_SUITE_data/MissingEnd.asn1 b/lib/asn1/test/asn1_SUITE_data/MissingEnd.asn1
deleted file mode 100644
index 66912ef693..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/MissingEnd.asn1
+++ /dev/null
@@ -1,5 +0,0 @@
-MissingEnd DEFINITIONS ::=
-BEGIN
-
-T ::= Typ
-
diff --git a/lib/asn1/test/asn1_SUITE_data/ObjIdValues.asn1 b/lib/asn1/test/asn1_SUITE_data/ObjIdValues.asn1
index 9368e8dceb..9193ed495c 100644
--- a/lib/asn1/test/asn1_SUITE_data/ObjIdValues.asn1
+++ b/lib/asn1/test/asn1_SUITE_data/ObjIdValues.asn1
@@ -50,6 +50,7 @@ itu-t-o OBJECT IDENTIFIER ::= {itu-t recommendation o}
itu-t-p OBJECT IDENTIFIER ::= {itu-t recommendation p}
itu-t-q OBJECT IDENTIFIER ::= {itu-t recommendation q}
itu-t-r OBJECT IDENTIFIER ::= {itu-t recommendation r}
+itu-t-s OBJECT IDENTIFIER ::= {itu-t recommendation s}
itu-t-t OBJECT IDENTIFIER ::= {itu-t recommendation t}
itu-t-u OBJECT IDENTIFIER ::= {itu-t recommendation u}
itu-t-v OBJECT IDENTIFIER ::= {itu-t recommendation v}
diff --git a/lib/asn1/test/asn1_SUITE_data/ParamBasic.asn1 b/lib/asn1/test/asn1_SUITE_data/ParamBasic.asn1
index 68fc782f33..d203b6c816 100644
--- a/lib/asn1/test/asn1_SUITE_data/ParamBasic.asn1
+++ b/lib/asn1/test/asn1_SUITE_data/ParamBasic.asn1
@@ -42,4 +42,37 @@ SIGNATURE-ALGORITHM ::= CLASS {
KEY &id CONTAINING &Type
}
+alg-seq-1 AnAlgorithm ::= { algorithm 1, type 42 }
+alg-seq-2 AnAlgorithm ::= { algorithm 2, type TRUE }
+
+--
+-- Test that indirect classes references are resolved.
+--
+
+AlgorithmIdentifier2 { ALGORITHM-IDENTIFIER:InfoObjectSet } ::= SEQUENCE {
+ algorithm ALGORITHM-IDENTIFIER.&id({InfoObjectSet}),
+ parameters ALGORITHM-IDENTIFIER.&Type({InfoObjectSet} {@algorithm}) OPTIONAL
+}
+
+ALGORITHM-IDENTIFIER ::= TYPE-IDENTIFIER
+
+Seq ::= SEQUENCE {
+ c1 AlgorithmIdentifier2 {{ObjectSet-1}},
+ c2 AlgorithmIdentifier2 {{ObjectSet-2}}
+}
+
+ObjectSet-1 ALGORITHM-IDENTIFIER ::= { {INTEGER IDENTIFIED BY {2 1 1}}, ... }
+ObjectSet-2 ALGORITHM-IDENTIFIER ::= { ... }
+
+-- Test a value that uses the instantiation of a parameterized type inline.
+-- (Adapted from PKCS-5.)
+--
+
+algid-hmacWithSHA1 AlgorithmIdentifier2 {{ObjectSet-3}} ::=
+ {algorithm id-hmacWithSHA1, parameters NULL : NULL}
+
+ObjectSet-3 TYPE-IDENTIFIER ::= { {NULL IDENTIFIED BY id-hmacWithSHA1} }
+
+id-hmacWithSHA1 OBJECT IDENTIFIER ::= {2 9 9 9 7}
+
END
diff --git a/lib/asn1/test/asn1_SUITE_data/Prim.asn1 b/lib/asn1/test/asn1_SUITE_data/Prim.asn1
index cc0e61422a..b4c011fd39 100644
--- a/lib/asn1/test/asn1_SUITE_data/Prim.asn1
+++ b/lib/asn1/test/asn1_SUITE_data/Prim.asn1
@@ -24,6 +24,8 @@ BEGIN
friday(5),saturday(6),sunday(7)}
SingleEnumVal ::= ENUMERATED {true}
SingleEnumValExt ::= ENUMERATED {true, ...}
+ NegEnumVal ::= ENUMERATED {neg(-1), ..., zero(0)}
+ EnumVal128 ::= ENUMERATED {val(128)}
ObjId ::= OBJECT IDENTIFIER
diff --git a/lib/asn1/test/asn1_SUITE_data/SelectionType.asn b/lib/asn1/test/asn1_SUITE_data/SelectionType.asn
index d7bfbf1788..6163f390dd 100644
--- a/lib/asn1/test/asn1_SUITE_data/SelectionType.asn
+++ b/lib/asn1/test/asn1_SUITE_data/SelectionType.asn
@@ -14,7 +14,7 @@ Element ::= CHOICE {bool BOOLEAN,
utf UTF8String,
ro RELATIVE-OID,
nums NumericString,
- symbol PrintableString,
+ symbol PrintableString,
telet TeletexString,
t61 T61String,
video VideotexString,
@@ -23,13 +23,14 @@ Element ::= CHOICE {bool BOOLEAN,
generalizedTime GeneralizedTime,
gs GraphicString,
vs VisibleString,
--- iso64 ISO646String,
generalString GeneralString,
univ UniversalString,
cs CHARACTER STRING,
bmp BMPString}
-MendeleyevTable ::= SEQUENCE OF symbol < Element
+MendeleyevTable ::= SEQUENCE OF symbol < Element
+MendeleyevSet ::= SET OF atomic-no < Element
+
BoolType ::= bool < Element
einsteinium symbol < Element ::= "Es"
@@ -51,7 +52,6 @@ utctimev utctime < Element ::= "9805281429Z"
gTime generalizedTime < Element ::= "19980528142905.1"
gsv gs < Element ::= "graphic"
vsv vs < Element ::= "visible"
---iso64v iso64 < Element ::= "iso"
gStringv generalString < Element ::= "general"
univv univ < Element ::= "Universal"
bmov bmp < Element ::= "bmp"
diff --git a/lib/asn1/test/asn1_SUITE_data/Seq.py b/lib/asn1/test/asn1_SUITE_data/Seq.py
index f345373ab5..b68f9045a6 100644
--- a/lib/asn1/test/asn1_SUITE_data/Seq.py
+++ b/lib/asn1/test/asn1_SUITE_data/Seq.py
@@ -142,7 +142,10 @@ SeqImp3 ::= SET
set Set1
}
-
+SeqCompOf ::= SEQUENCE {
+ ...,
+ COMPONENTS OF SeqS3
+}
END
diff --git a/lib/asn1/test/asn1_SUITE_data/SeqOptional2.asn b/lib/asn1/test/asn1_SUITE_data/SeqOptional2.asn
index 7de9134096..bb85c9e418 100644
--- a/lib/asn1/test/asn1_SUITE_data/SeqOptional2.asn
+++ b/lib/asn1/test/asn1_SUITE_data/SeqOptional2.asn
@@ -15,10 +15,10 @@ SeqOpt1Imp ::= SEQUENCE
bool1 [1] BOOLEAN OPTIONAL,
int1 INTEGER,
seq1 [2] SeqIn OPTIONAL,
- seq2 [2] SeqIn OPTIONAL,
+ seq2 [3] SeqIn OPTIONAL,
...,
- int2 [3] SeqIn,
- int3 [3] SeqIn
+ int2 [4] SeqIn,
+ int3 [5] SeqIn
}
SeqOpt1Exp ::= SEQUENCE
diff --git a/lib/asn1/test/asn1_SUITE_data/SequenceBadComma.asn b/lib/asn1/test/asn1_SUITE_data/SequenceBadComma.asn
deleted file mode 100644
index 436815aa9b..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/SequenceBadComma.asn
+++ /dev/null
@@ -1,10 +0,0 @@
-SequenceBadComma DEFINITIONS IMPLICIT TAGS ::=
-BEGIN
-EXPORTS Person;
-
-Person ::= [PRIVATE 19] SEQUENCE {,
- name PrintableString,
- location INTEGER {home(0),field(1),roving(2)},
- age INTEGER OPTIONAL
- }
-END
diff --git a/lib/asn1/test/asn1_SUITE_data/SequenceBadComponentName.asn1 b/lib/asn1/test/asn1_SUITE_data/SequenceBadComponentName.asn1
deleted file mode 100644
index 8b2b8816db..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/SequenceBadComponentName.asn1
+++ /dev/null
@@ -1,10 +0,0 @@
-SequenceBadComponentName DEFINITIONS ::=
-BEGIN
-
-T ::= Typ
-
-Typ ::= SEQUENCE {
- a INTEGER,
- C Typ}
-
-END
diff --git a/lib/asn1/test/asn1_SUITE_data/SequenceBadComponentType.asn1 b/lib/asn1/test/asn1_SUITE_data/SequenceBadComponentType.asn1
deleted file mode 100644
index 0c33f48906..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/SequenceBadComponentType.asn1
+++ /dev/null
@@ -1,10 +0,0 @@
-SequenceBadComponentType DEFINITIONS ::=
-BEGIN
-
-T ::= Typ
-
-Typ ::= SEQUENCE {
- a b,
- c T}
-
-END
diff --git a/lib/asn1/test/asn1_SUITE_data/Syntax.py b/lib/asn1/test/asn1_SUITE_data/Syntax.py
deleted file mode 100644
index 867d1148e1..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/Syntax.py
+++ /dev/null
@@ -1,10 +0,0 @@
-Syntax DEFINITIONS IMPLICIT TAGS ::=
-BEGIN
-EXPORTS Person;
-
-Person ::= [PRIVATE 19] SEQUENCE {,
- name PrintableString,
- location INTEGER {home(0),field(1),roving(2)},
- age INTEGER OPTIONAL
- }
-END
diff --git a/lib/asn1/test/asn1_SUITE_data/ValueTest.asn b/lib/asn1/test/asn1_SUITE_data/ValueTest.asn
index dae9ae498a..b2c59d686a 100644
--- a/lib/asn1/test/asn1_SUITE_data/ValueTest.asn
+++ b/lib/asn1/test/asn1_SUITE_data/ValueTest.asn
@@ -1,4 +1,4 @@
-ValueTest DEFINITIONS ::=
+ValueTest DEFINITIONS AUTOMATIC TAGS ::=
BEGIN
@@ -23,8 +23,15 @@ vENUMERATED RadioButton ::= button1
vBS BSNNL ::= {zero,two}
vNULL NULL ::= NULL
vOS OCTET STRING ::= '313233'H
-vOD OBJECT IDENTIFIER ::= {2 1 1}
+-- OBJECT IDENTIFIER
+vOD OBJECT IDENTIFIER ::= {2 1 1}
+one INTEGER ::= 1
+integer-first OBJECT IDENTIFIER ::= {one 2}
+rel-oid-1 RELATIVE-OID ::= {2 4 5}
+include-roid OBJECT IDENTIFIER ::= {0 rel-oid-1}
+include-oid OBJECT IDENTIFIER ::= {integer-first 1}
+include-all OBJECT IDENTIFIER ::= {integer-first 1 rel-oid-1 42}
--Character strings
numericstring NumericString ::= "01234567"
@@ -41,7 +48,6 @@ objectdescriptor ObjectDescriptor ::= "ObjectDescriptor"
graphicstring GraphicString ::= "GraphicString"
generalstring GeneralString ::= "GeneralString"
bmpstring1 BMPString ::= "BMPString"
---bmpstring2 BMPString ::= [{0,0,0,66},{0,0,0,77},{0,0,0,80},{0,0,0,115},{0,0,0,116},{0,0,0,114},{0,0,0,105},{0,0,0,110},{0,0,0,103}]
latinCapitalLetterA UniversalString ::= {0,0,0,65}
greekCapitalLetterSigma UniversalString ::= {0,0,3,145}
my-universalstring UniversalString ::= {"This is a capital A: ",
@@ -50,4 +56,88 @@ my-universalstring UniversalString ::= {"This is a capital A: ",
greekCapitalLetterSigma,
"; try and spot the difference!"}
+-- Useful parameterized SEQUENCE.
+ParamSeq{Type} ::= SEQUENCE {
+ a Type
+}
+
+-- Integer values.
+IntegerSeq ::= ParamSeq{INTEGER}
+someInteger INTEGER ::= 42
+integerSeq1 IntegerSeq ::= { a otherInteger }
+otherInteger INTEGER ::= someInteger
+
+--
+-- Values from objects.
+--
+int-from-object-1 INTEGER ::= int-holder-2.&obj.&id
+int-from-object-2 INTEGER ::= int-holder-2.&id
+
+INT-HOLDER ::= CLASS {
+ &id INTEGER UNIQUE,
+ &obj INT-HOLDER OPTIONAL
+} WITH SYNTAX {
+ ID &id
+ [OBJ &obj]
+}
+
+int-holder-1 INT-HOLDER ::= { ID 2 }
+int-holder-2 INT-HOLDER ::= { ID 4 OBJ int-holder-1 }
+
+II ::= INTEGER (int-from-object-1..int-from-object-2)
+
+-- Recursive OCTET STRING definitions.
+
+OS-HOLDER ::= CLASS {
+ &id INTEGER UNIQUE,
+ &os OCTET STRING
+} WITH SYNTAX {
+ ID &id OS &os
+}
+
+os-holder-1 OS-HOLDER ::= { ID 1 OS '4041FF'H }
+
+OctetStringSeq ::= ParamSeq{OCTET STRING}
+
+someOctetString OCTET STRING ::= '404142'H
+
+octetStringSeq1 OctetStringSeq ::= { a someOctetString }
+octetStringSeq2 OctetStringSeq ::= { a otherOctetString }
+octetStringSeq3 OctetStringSeq ::= { a os-holder-1.&os }
+
+otherOctetString OCTET STRING ::= someOctetString
+
+os-1 OCTET STRING ::= os-2
+os-2 OCTET STRING ::= os-holder-1.&os
+
+-- Recursive BIT STRING definitions.
+
+BS-HOLDER ::= CLASS {
+ &id INTEGER UNIQUE,
+ &bs BIT STRING,
+ &named-bs NamedBsType
+} WITH SYNTAX {
+ ID &id BS &bs NAMED-BS &named-bs
+}
+bs-holder-1 BS-HOLDER ::= { ID 1 BS '101'B NAMED-BS {a,c} }
+
+NamedBsType ::= BIT STRING {a(0),b(1),c(2)}
+BsSeq ::= SEQUENCE {
+ a BIT STRING,
+ b NamedBsType
+}
+
+someBitString BIT STRING ::= '101101'B
+
+bsSeq1 BsSeq ::= { a someBitString, b someNamedBs }
+bsSeq2 BsSeq ::= { a otherBitString, b someOtherNamedBs }
+bsSeq3 BsSeq ::= { a bs-holder-1.&bs, b bs-holder-1.&named-bs }
+
+otherBitString BIT STRING ::= someBitString
+bsFromObjectInd BIT STRING ::= bsFromObject
+bsFromObject BIT STRING ::= bs-holder-1.&bs
+
+someOtherNamedBs NamedBsType ::= someNamedBs
+someNamedBs NamedBsType ::= {c}
+
END
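The object-dereferencing values added above resolve straightforwardly: int-holder-2 binds ID to 4 and OBJ to int-holder-1, whose ID is 2. A sketch of the fully resolved equivalents (illustration only, not part of the patch):

    -- int-from-object-1 INTEGER ::= 2        -- int-holder-2.&obj.&id
    -- int-from-object-2 INTEGER ::= 4        -- int-holder-2.&id
    -- II ::= INTEGER (2..4)
    -- os-1, os-2 and octetStringSeq3.a all resolve to os-holder-1.&os, i.e. '4041FF'H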
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/ACSE-1.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/ACSE-1.asn1
index 3f1385323a..3f1385323a 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/ACSE-1.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/ACSE-1.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/AlgorithmInformation-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/AlgorithmInformation-2009.asn1
new file mode 100644
index 0000000000..f912966c72
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/AlgorithmInformation-2009.asn1
@@ -0,0 +1,466 @@
+AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+DEFINITIONS EXPLICIT TAGS ::=
+BEGIN
+EXPORTS ALL;
+IMPORTS
+
+KeyUsage
+FROM PKIX1Implicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1)
+ security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-implicit-02(59)} ;
+
+-- Suggested prefixes for algorithm objects are:
+--
+-- mda- Message Digest Algorithms
+-- sa- Signature Algorithms
+-- kta- Key Transport Algorithms (Asymmetric)
+-- kaa- Key Agreement Algorithms (Asymmetric)
+-- kwa- Key Wrap Algorithms (Symmetric)
+-- kda- Key Derivation Algorithms
+-- maca- Message Authentication Code Algorithms
+-- pk- Public Key
+-- cea- Content (symmetric) Encryption Algorithms
+-- cap- S/MIME Capabilities
+
+ParamOptions ::= ENUMERATED {
+ required, -- Parameters MUST be encoded in structure
+ preferredPresent, -- Parameters SHOULD be encoded in structure
+ preferredAbsent, -- Parameters SHOULD NOT be encoded in structure
+ absent, -- Parameters MUST NOT be encoded in structure
+ inheritable, -- Parameters are inherited if not present
+ optional, -- Parameters MAY be encoded in the structure
+ ...
+}
+
+-- DIGEST-ALGORITHM
+--
+-- Describes the basic information for ASN.1 and a digest
+-- algorithm.
+--
+-- &id - contains the OID identifying the digest algorithm
+-- &Params - if present, contains the type for the algorithm
+-- parameters; if absent, implies no parameters
+-- &paramPresence - parameter presence requirement
+--
+-- Additional information such as the length of the hash could have
+-- been encoded. Without a clear understanding of what information
+-- is needed by applications, such extraneous information was not
+-- considered to be of sufficient importance.
+--
+-- Example:
+-- mda-sha1 DIGEST-ALGORITHM ::= {
+-- IDENTIFIER id-sha1
+-- PARAMS TYPE NULL ARE preferredAbsent
+-- }
+
+DIGEST-ALGORITHM ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &Params OPTIONAL,
+ &paramPresence ParamOptions DEFAULT absent
+} WITH SYNTAX {
+ IDENTIFIER &id
+ [PARAMS [TYPE &Params] ARE &paramPresence ]
+}
+
+-- SIGNATURE-ALGORITHM
+--
+-- Describes the basic properties of a signature algorithm
+--
+-- &id - contains the OID identifying the signature algorithm
+-- &Value - contains a type definition for the value structure of
+-- the signature; if absent, implies that no ASN.1
+-- encoding is performed on the value
+-- &Params - if present, contains the type for the algorithm
+-- parameters; if absent, implies no parameters
+-- &paramPresence - parameter presence requirement
+-- &HashSet - The set of hash algorithms used with this
+-- signature algorithm
+-- &PublicKeySet - the set of public key algorithms for this
+-- signature algorithm
+-- &smimeCaps - contains the object describing how the S/MIME
+-- capabilities are presented.
+--
+-- Example:
+-- sig-RSA-PSS SIGNATURE-ALGORITHM ::= {
+-- IDENTIFIER id-RSASSA-PSS
+-- PARAMS TYPE RSASSA-PSS-params ARE required
+-- HASHES { mda-sha1 | mda-md5, ... }
+-- PUBLIC-KEYS { pk-rsa | pk-rsa-pss }
+-- }
+
+SIGNATURE-ALGORITHM ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &Value OPTIONAL,
+ &Params OPTIONAL,
+ &paramPresence ParamOptions DEFAULT absent,
+ &HashSet DIGEST-ALGORITHM OPTIONAL,
+ &PublicKeySet PUBLIC-KEY OPTIONAL,
+ &smimeCaps SMIME-CAPS OPTIONAL
+} WITH SYNTAX {
+ IDENTIFIER &id
+ [VALUE &Value]
+ [PARAMS [TYPE &Params] ARE &paramPresence ]
+ [HASHES &HashSet]
+ [PUBLIC-KEYS &PublicKeySet]
+ [SMIME-CAPS &smimeCaps]
+}
+
+-- PUBLIC-KEY
+--
+-- Describes the basic properties of a public key
+--
+-- &id - contains the OID identifying the public key
+-- &KeyValue - contains the type for the key value
+-- &Params - if present, contains the type for the algorithm
+-- parameters; if absent, implies no parameters
+-- &paramPresence - parameter presence requirement
+-- &keyUsage - contains the set of bits that are legal for this
+-- key type. Note that it does not make any statement
+-- about how bits may be paired.
+-- &PrivateKey - contains a type structure for encoding the private
+-- key information.
+--
+-- Example:
+-- pk-rsa-pss PUBLIC-KEY ::= {
+-- IDENTIFIER id-RSASSA-PSS
+-- KEY RSAPublicKey
+-- PARAMS TYPE RSASSA-PSS-params ARE optional
+-- CERT-KEY-USAGE { .... }
+-- }
+
+PUBLIC-KEY ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &KeyValue OPTIONAL,
+ &Params OPTIONAL,
+ &paramPresence ParamOptions DEFAULT absent,
+ &keyUsage KeyUsage OPTIONAL,
+ &PrivateKey OPTIONAL
+} WITH SYNTAX {
+ IDENTIFIER &id
+ [KEY &KeyValue]
+ [PARAMS [TYPE &Params] ARE &paramPresence]
+ [CERT-KEY-USAGE &keyUsage]
+ [PRIVATE-KEY &PrivateKey]
+}
+
+-- KEY-TRANSPORT
+--
+-- Describes the basic properties of a key transport algorithm
+--
+-- &id - contains the OID identifying the key transport algorithm
+-- &Params - if present, contains the type for the algorithm
+-- parameters; if absent, implies no parameters
+-- &paramPresence - parameter presence requirement
+-- &PublicKeySet - specifies which public keys are used with
+-- this algorithm
+-- &smimeCaps - contains the object describing how the S/MIME
+-- capabilities are presented.
+--
+-- Example:
+-- kta-rsaTransport KEY-TRANSPORT ::= {
+-- IDENTIFIER &id
+-- PARAMS TYPE NULL ARE required
+-- PUBLIC-KEYS { pk-rsa | pk-rsa-pss }
+-- }
+
+KEY-TRANSPORT ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &Params OPTIONAL,
+ &paramPresence ParamOptions DEFAULT absent,
+ &PublicKeySet PUBLIC-KEY OPTIONAL,
+ &smimeCaps SMIME-CAPS OPTIONAL
+} WITH SYNTAX {
+ IDENTIFIER &id
+ [PARAMS [TYPE &Params] ARE &paramPresence]
+ [PUBLIC-KEYS &PublicKeySet]
+ [SMIME-CAPS &smimeCaps]
+}
+
+-- KEY-AGREE
+--
+-- Describes the basic properties of a key agreement algorithm
+--
+-- &id - contains the OID identifying the key agreement algorithm
+-- &Params - if present, contains the type for the algorithm
+-- parameters; if absent, implies no parameters
+-- &paramPresence - parameter presence requirement
+-- &PublicKeySet - specifies which public keys are used with
+-- this algorithm
+-- &Ukm - type of user keying material used
+-- &ukmPresence - specifies the requirements to define the UKM field
+-- &smimeCaps - contains the object describing how the S/MIME
+-- capabilities are presented.
+--
+-- Example:
+-- kaa-dh-static-ephemeral KEY-AGREE ::= {
+-- IDENTIFIER id-alg-ESDH
+-- PARAMS TYPE KeyWrapAlgorithm ARE required
+-- PUBLIC-KEYS {
+-- {IDENTIFIER dh-public-number KEY DHPublicKey
+-- PARAMS TYPE DHDomainParameters ARE inheritable }
+-- }
+-- - - UKM should be present but is not separately ASN.1-encoded
+-- UKM ARE preferredPresent
+-- }
+
+KEY-AGREE ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &Params OPTIONAL,
+ &paramPresence ParamOptions DEFAULT absent,
+ &PublicKeySet PUBLIC-KEY OPTIONAL,
+ &Ukm OPTIONAL,
+ &ukmPresence ParamOptions DEFAULT absent,
+ &smimeCaps SMIME-CAPS OPTIONAL
+} WITH SYNTAX {
+ IDENTIFIER &id
+ [PARAMS [TYPE &Params] ARE &paramPresence]
+ [PUBLIC-KEYS &PublicKeySet]
+ [UKM [TYPE &Ukm] ARE &ukmPresence]
+ [SMIME-CAPS &smimeCaps]
+}
+
+-- KEY-WRAP
+--
+-- Describes the basic properties of a key wrap algorithm
+--
+-- &id - contains the OID identifying the key wrap algorithm
+-- &Params - if present, contains the type for the algorithm
+-- parameters; if absent, implies no parameters
+-- &paramPresence - parameter presence requirement
+-- &smimeCaps - contains the object describing how the S/MIME
+-- capabilities are presented.
+--
+-- Example:
+-- kwa-cms3DESwrap KEY-WRAP ::= {
+-- IDENTIFIER id-alg-CMS3DESwrap
+-- PARAMS TYPE NULL ARE required
+-- }
+
+KEY-WRAP ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &Params OPTIONAL,
+ &paramPresence ParamOptions DEFAULT absent,
+ &smimeCaps SMIME-CAPS OPTIONAL
+} WITH SYNTAX {
+ IDENTIFIER &id
+ [PARAMS [TYPE &Params] ARE &paramPresence]
+ [SMIME-CAPS &smimeCaps]
+}
+-- KEY-DERIVATION
+--
+-- Describes the basic properties of a key derivation algorithm
+--
+-- &id - contains the OID identifying the key derivation algorithm
+-- &Params - if present, contains the type for the algorithm
+-- parameters; if absent, implies no parameters
+-- &paramPresence - parameter presence requirement
+-- &smimeCaps - contains the object describing how the S/MIME
+-- capabilities are presented.
+--
+-- Example:
+-- kda-pbkdf2 KEY-DERIVATION ::= {
+-- IDENTIFIER id-PBKDF2
+-- PARAMS TYPE PBKDF2-params ARE required
+-- }
+
+KEY-DERIVATION ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &Params OPTIONAL,
+ &paramPresence ParamOptions DEFAULT absent,
+ &smimeCaps SMIME-CAPS OPTIONAL
+} WITH SYNTAX {
+ IDENTIFIER &id
+ [PARAMS [TYPE &Params] ARE &paramPresence]
+ [SMIME-CAPS &smimeCaps]
+}
+
+-- MAC-ALGORITHM
+--
+-- Describes the basic properties of a message
+-- authentication code (MAC) algorithm
+--
+-- &id - contains the OID identifying the MAC algorithm
+-- &Params - if present, contains the type for the algorithm
+-- parameters; if absent, implies no parameters
+-- &paramPresence - parameter presence requirement
+-- &keyed - MAC algorithm is a keyed MAC algorithm
+-- &smimeCaps - contains the object describing how the S/MIME
+-- capabilities are presented.
+--
+-- Some parameters that perhaps should have been added would be
+-- fields with the minimum and maximum MAC lengths for
+-- those MAC algorithms that allow truncations.
+--
+-- Example:
+-- maca-hmac-sha1 MAC-ALGORITHM ::= {
+-- IDENTIFIER hMAC-SHA1
+-- PARAMS TYPE NULL ARE preferredAbsent
+-- IS KEYED MAC TRUE
+-- SMIME-CAPS {IDENTIFIED BY hMAC-SHA1}
+-- }
+
+MAC-ALGORITHM ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &Params OPTIONAL,
+ &paramPresence ParamOptions DEFAULT absent,
+ &keyed BOOLEAN,
+ &smimeCaps SMIME-CAPS OPTIONAL
+} WITH SYNTAX {
+ IDENTIFIER &id
+ [PARAMS [TYPE &Params] ARE &paramPresence]
+ IS-KEYED-MAC &keyed
+ [SMIME-CAPS &smimeCaps]
+}
+
+-- CONTENT-ENCRYPTION
+--
+-- Describes the basic properties of a content encryption
+-- algorithm
+--
+-- &id - contains the OID identifying the content
+-- encryption algorithm
+-- &Params - if present, contains the type for the algorithm
+-- parameters; if absent, implies no parameters
+-- &paramPresence - parameter presence requirement
+-- &smimeCaps - contains the object describing how the S/MIME
+-- capabilities are presented.
+--
+-- Example:
+-- cea-3DES-cbc CONTENT-ENCRYPTION ::= {
+-- IDENTIFIER des-ede3-cbc
+-- PARAMS TYPE IV ARE required
+-- SMIME-CAPS { IDENTIFIED BY des-ede3-cbc }
+-- }
+
+CONTENT-ENCRYPTION ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &Params OPTIONAL,
+ &paramPresence ParamOptions DEFAULT absent,
+ &smimeCaps SMIME-CAPS OPTIONAL
+} WITH SYNTAX {
+ IDENTIFIER &id
+ [PARAMS [TYPE &Params] ARE &paramPresence]
+ [SMIME-CAPS &smimeCaps]
+}
+
+-- ALGORITHM
+--
+-- Describes a generic algorithm identifier
+--
+-- &id - contains the OID identifying the algorithm
+-- &Params - if present, contains the type for the algorithm
+-- parameters; if absent, implies no parameters
+-- &paramPresence - parameter presence requirement
+-- &smimeCaps - contains the object describing how the S/MIME
+-- capabilities are presented.
+--
+-- This would be used for cases where an algorithm of an unknown
+-- type is used. In general however, one should either define
+-- a more complete algorithm structure (such as the one above)
+-- or use the TYPE-IDENTIFIER class.
+
+ALGORITHM ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &Params OPTIONAL,
+ &paramPresence ParamOptions DEFAULT absent,
+ &smimeCaps SMIME-CAPS OPTIONAL
+} WITH SYNTAX {
+ IDENTIFIER &id
+ [PARAMS [TYPE &Params] ARE &paramPresence]
+ [SMIME-CAPS &smimeCaps]
+}
+
+-- AlgorithmIdentifier
+--
+-- Provides the generic structure that is used to encode algorithm
+-- identification and the parameters associated with the
+-- algorithm.
+--
+-- The first parameter represents the type of the algorithm being
+-- used.
+-- The second parameter represents an object set containing the
+-- algorithms that may occur in this situation.
+-- The initial list of required algorithms should occur to the
+-- left of an extension marker; all other algorithms should
+-- occur to the right of an extension marker.
+--
+-- The object class ALGORITHM can be used for generic unspecified
+-- items.
+-- If new ALGORITHM classes are defined, the fields &id and &Params
+-- need to be present as fields in the object in order to use
+-- this parameterized type.
+--
+-- Example:
+-- SignatureAlgorithmIdentifier ::=
+-- AlgorithmIdentifier{SIGNATURE-ALGORITHM, {SignatureAlgSet}}
+
+AlgorithmIdentifier{ALGORITHM-TYPE, ALGORITHM-TYPE:AlgorithmSet} ::=
+ SEQUENCE {
+ algorithm ALGORITHM-TYPE.&id({AlgorithmSet}),
+ parameters ALGORITHM-TYPE.
+ &Params({AlgorithmSet}{@algorithm}) OPTIONAL
+ }
+
+-- S/MIME Capabilities
+--
+-- We have moved the SMIME-CAPS from the module for RFC 3851 to here
+-- because it is used in RFC 4262 (X.509 Certificate Extension for
+-- S/MIME Capabilities)
+--
+--
+-- This class is used to represent an S/MIME capability. S/MIME
+-- capabilities are used to represent what algorithm capabilities
+-- an individual has. The classic example was the content encryption
+-- algorithm RC2 where the algorithm id and the RC2 key lengths
+-- supported needed to be advertised, but the IV used is not fixed.
+-- Thus, for RC2 we used
+--
+-- cap-RC2CBC SMIME-CAPS ::= {
+-- TYPE INTEGER ( 40 | 128 ) IDENTIFIED BY rc2-cbc }
+--
+-- where 40 and 128 represent the RC2 key length in number of bits.
+--
+-- Another example where information needs to be shown is for
+-- RSA-OAEP where only specific hash functions or mask generation
+-- functions are supported, but the saltLength is specified by the
+-- sender and not the recipient. In this case, one can either
+-- generate a number of capability items,
+-- or a new S/MIME capability type could be generated where
+-- multiple hash functions could be specified.
+--
+--
+-- SMIME-CAP
+--
+-- This class is used to associate the type that describes the
+-- capabilities with the object identifier.
+--
+
+SMIME-CAPS ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &Type OPTIONAL
+}
+WITH SYNTAX { [TYPE &Type] IDENTIFIED BY &id }
+
+--
+-- Generic type - this is used for defining values.
+--
+
+-- Define a single S/MIME capability encoding
+
+SMIMECapability{SMIME-CAPS:CapabilitySet} ::= SEQUENCE {
+ capabilityID SMIME-CAPS.&id({CapabilitySet}),
+ parameters SMIME-CAPS.&Type({CapabilitySet}
+ {@capabilityID}) OPTIONAL
+}
+
+-- Define a sequence of S/MIME capability values
+
+SMIMECapabilities { SMIME-CAPS:CapabilitySet } ::=
+ SEQUENCE SIZE (1..MAX) OF SMIMECapability{{CapabilitySet} }
+
+END
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/AttributeCertificateVersion1-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/AttributeCertificateVersion1-2009.asn1
new file mode 100644
index 0000000000..46b431af40
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/AttributeCertificateVersion1-2009.asn1
@@ -0,0 +1,59 @@
+ AttributeCertificateVersion1-2009
+ {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-v1AttrCert-02(49)}
+ DEFINITIONS EXPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ SIGNATURE-ALGORITHM, ALGORITHM, AlgorithmIdentifier{}
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ AttributeSet{}, Extensions{}, EXTENSION, ATTRIBUTE
+ FROM PKIX-CommonTypes-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57) }
+
+ CertificateSerialNumber, UniqueIdentifier, SIGNED{}
+ FROM PKIX1Explicit-2009
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-explicit-02(51) }
+
+ GeneralNames
+ FROM PKIX1Implicit-2009
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-implicit-02(59) }
+
+ AttCertValidityPeriod, IssuerSerial
+ FROM PKIXAttributeCertificate-2009
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-attribute-cert-02(47) } ;
+
+ -- Definition extracted from X.509-1997 [X.509-97], but
+ -- different type names are used to avoid collisions.
+
+ AttributeCertificateV1 ::= SIGNED{AttributeCertificateInfoV1}
+
+ AttributeCertificateInfoV1 ::= SEQUENCE {
+ version AttCertVersionV1 DEFAULT v1,
+ subject CHOICE {
+ baseCertificateID [0] IssuerSerial,
+ -- associated with a Public Key Certificate
+ subjectName [1] GeneralNames },
+ -- associated with a name
+ issuer GeneralNames,
+ signature AlgorithmIdentifier{SIGNATURE-ALGORITHM, {...}},
+ serialNumber CertificateSerialNumber,
+ attCertValidityPeriod AttCertValidityPeriod,
+ attributes SEQUENCE OF AttributeSet{{AttrList}},
+ issuerUniqueID UniqueIdentifier OPTIONAL,
+ extensions Extensions{{AttributeCertExtensionsV1}} OPTIONAL }
+
+ AttCertVersionV1 ::= INTEGER { v1(0) }
+
+ AttrList ATTRIBUTE ::= {...}
+ AttributeCertExtensionsV1 EXTENSION ::= {...}
+
+ END
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/AuthenticationFramework.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/AuthenticationFramework.asn1
index 5cfa9062f0..5cfa9062f0 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/AuthenticationFramework.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/AuthenticationFramework.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/BasicAccessControl.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/BasicAccessControl.asn1
index d8b2b687ae..d8b2b687ae 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/BasicAccessControl.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/BasicAccessControl.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/CertificateExtensions.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/CertificateExtensions.asn1
index 0daf2208e9..0daf2208e9 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/CertificateExtensions.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/CertificateExtensions.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Character-Coding-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Character-Coding-Attributes.asn1
index 04060cf060..04060cf060 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Character-Coding-Attributes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Character-Coding-Attributes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Character-Presentation-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Character-Presentation-Attributes.asn1
index aed48ac26b..aed48ac26b 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Character-Presentation-Attributes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Character-Presentation-Attributes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Character-Profile-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Character-Profile-Attributes.asn1
index 7ba5bf194a..7ba5bf194a 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Character-Profile-Attributes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Character-Profile-Attributes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Colour-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Colour-Attributes.asn1
index 24c7fafc38..24c7fafc38 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Colour-Attributes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Colour-Attributes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/CryptographicMessageSyntax-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/CryptographicMessageSyntax-2009.asn1
new file mode 100644
index 0000000000..3e350294be
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/CryptographicMessageSyntax-2009.asn1
@@ -0,0 +1,463 @@
+ CryptographicMessageSyntax-2009
+ { iso(1) member-body(2) us(840) rsadsi(113549)
+ pkcs(1) pkcs-9(9) smime(16) modules(0) id-mod-cms-2004-02(41) }
+ DEFINITIONS IMPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ ParamOptions, DIGEST-ALGORITHM, SIGNATURE-ALGORITHM,
+ PUBLIC-KEY, KEY-DERIVATION, KEY-WRAP, MAC-ALGORITHM,
+ KEY-AGREE, KEY-TRANSPORT, CONTENT-ENCRYPTION, ALGORITHM,
+ AlgorithmIdentifier
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+ SignatureAlgs, MessageDigestAlgs, KeyAgreementAlgs,
+ MessageAuthAlgs, KeyWrapAlgs, ContentEncryptionAlgs,
+ KeyTransportAlgs, KeyDerivationAlgs, KeyAgreePublicKeys
+ FROM CryptographicMessageSyntaxAlgorithms-2009
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-cmsalg-2001-02(37) }
+
+ Certificate, CertificateList, CertificateSerialNumber,
+ Name, ATTRIBUTE
+ FROM PKIX1Explicit-2009
+ { iso(1) identified-organization(3) dod(6) internet(1)
+ security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-explicit-02(51) }
+
+ AttributeCertificate
+ FROM PKIXAttributeCertificate-2009
+ { iso(1) identified-organization(3) dod(6) internet(1)
+ security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-attribute-cert-02(47) }
+
+ AttributeCertificateV1
+ FROM AttributeCertificateVersion1-2009
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-v1AttrCert-02(49) } ;
+
+ -- Cryptographic Message Syntax
+
+ -- The following are used for version numbers using the ASN.1
+ -- idiom "[[n:"
+ -- Version 1 = PKCS #7
+ -- Version 2 = S/MIME V2
+ -- Version 3 = RFC 2630
+ -- Version 4 = RFC 3369
+ -- Version 5 = RFC 3852
+
+ CONTENT-TYPE ::= TYPE-IDENTIFIER
+ ContentType ::= CONTENT-TYPE.&id
+
+ ContentInfo ::= SEQUENCE {
+ contentType CONTENT-TYPE.
+ &id({ContentSet}),
+ content [0] EXPLICIT CONTENT-TYPE.
+ &Type({ContentSet}{@contentType})}
+
+ ContentSet CONTENT-TYPE ::= {
+ -- Define the set of content types to be recognized.
+ ct-Data | ct-SignedData | ct-EncryptedData | ct-EnvelopedData |
+ ct-AuthenticatedData | ct-DigestedData, ... }
+
+ SignedData ::= SEQUENCE {
+ version CMSVersion,
+ digestAlgorithms SET OF DigestAlgorithmIdentifier,
+ encapContentInfo EncapsulatedContentInfo,
+ certificates [0] IMPLICIT CertificateSet OPTIONAL,
+ crls [1] IMPLICIT RevocationInfoChoices OPTIONAL,
+ signerInfos SignerInfos }
+
+ SignerInfos ::= SET OF SignerInfo
+
+ EncapsulatedContentInfo ::= SEQUENCE {
+ eContentType CONTENT-TYPE.&id({ContentSet}),
+ eContent [0] EXPLICIT OCTET STRING
+ ( CONTAINING CONTENT-TYPE.
+ &Type({ContentSet}{@eContentType})) OPTIONAL }
+
+ SignerInfo ::= SEQUENCE {
+ version CMSVersion,
+ sid SignerIdentifier,
+ digestAlgorithm DigestAlgorithmIdentifier,
+ signedAttrs [0] IMPLICIT SignedAttributes OPTIONAL,
+ signatureAlgorithm SignatureAlgorithmIdentifier,
+ signature SignatureValue,
+ unsignedAttrs [1] IMPLICIT Attributes
+ {{UnsignedAttributes}} OPTIONAL }
+
+ SignedAttributes ::= Attributes {{ SignedAttributesSet }}
+
+ SignerIdentifier ::= CHOICE {
+ issuerAndSerialNumber IssuerAndSerialNumber,
+ ...,
+ [[3: subjectKeyIdentifier [0] SubjectKeyIdentifier ]] }
+
+ SignedAttributesSet ATTRIBUTE ::=
+ { aa-signingTime | aa-messageDigest | aa-contentType, ... }
+
+ UnsignedAttributes ATTRIBUTE ::= { aa-countersignature, ... }
+
+ SignatureValue ::= OCTET STRING
+
+ EnvelopedData ::= SEQUENCE {
+ version CMSVersion,
+ originatorInfo [0] IMPLICIT OriginatorInfo OPTIONAL,
+ recipientInfos RecipientInfos,
+ encryptedContentInfo EncryptedContentInfo,
+ ...,
+ [[2: unprotectedAttrs [1] IMPLICIT Attributes
+ {{ UnprotectedAttributes }} OPTIONAL ]] }
+
+ OriginatorInfo ::= SEQUENCE {
+ certs [0] IMPLICIT CertificateSet OPTIONAL,
+ crls [1] IMPLICIT RevocationInfoChoices OPTIONAL }
+
+ RecipientInfos ::= SET SIZE (1..MAX) OF RecipientInfo
+
+ EncryptedContentInfo ::= SEQUENCE {
+ contentType CONTENT-TYPE.&id({ContentSet}),
+ contentEncryptionAlgorithm ContentEncryptionAlgorithmIdentifier,
+ encryptedContent [0] IMPLICIT OCTET STRING OPTIONAL }
+
+ -- If you want to do constraints, you might use:
+ -- EncryptedContentInfo ::= SEQUENCE {
+ -- contentType CONTENT-TYPE.&id({ContentSet}),
+ -- contentEncryptionAlgorithm ContentEncryptionAlgorithmIdentifier,
+ -- encryptedContent [0] IMPLICIT ENCRYPTED {CONTENT-TYPE.
+ -- &Type({ContentSet}{@contentType}) OPTIONAL }
+ -- ENCRYPTED {ToBeEncrypted} ::= OCTET STRING ( CONSTRAINED BY
+ -- { ToBeEncrypted } )
+
+ UnprotectedAttributes ATTRIBUTE ::= { ... }
+
+ RecipientInfo ::= CHOICE {
+ ktri KeyTransRecipientInfo,
+ ...,
+ [[3: kari [1] KeyAgreeRecipientInfo ]],
+ [[4: kekri [2] KEKRecipientInfo]],
+ [[5: pwri [3] PasswordRecipientInfo,
+ ori [4] OtherRecipientInfo ]] }
+
+ EncryptedKey ::= OCTET STRING
+
+ KeyTransRecipientInfo ::= SEQUENCE {
+ version CMSVersion, -- always set to 0 or 2
+ rid RecipientIdentifier,
+ keyEncryptionAlgorithm AlgorithmIdentifier
+ {KEY-TRANSPORT, {KeyTransportAlgorithmSet}},
+ encryptedKey EncryptedKey }
+
+ KeyTransportAlgorithmSet KEY-TRANSPORT ::= { KeyTransportAlgs, ... }
+
+ RecipientIdentifier ::= CHOICE {
+ issuerAndSerialNumber IssuerAndSerialNumber,
+ ...,
+ [[2: subjectKeyIdentifier [0] SubjectKeyIdentifier ]] }
+ KeyAgreeRecipientInfo ::= SEQUENCE {
+ version CMSVersion, -- always set to 3
+ originator [0] EXPLICIT OriginatorIdentifierOrKey,
+ ukm [1] EXPLICIT UserKeyingMaterial OPTIONAL,
+ keyEncryptionAlgorithm AlgorithmIdentifier
+ {KEY-AGREE, {KeyAgreementAlgorithmSet}},
+ recipientEncryptedKeys RecipientEncryptedKeys }
+
+ KeyAgreementAlgorithmSet KEY-AGREE ::= { KeyAgreementAlgs, ... }
+
+ OriginatorIdentifierOrKey ::= CHOICE {
+ issuerAndSerialNumber IssuerAndSerialNumber,
+ subjectKeyIdentifier [0] SubjectKeyIdentifier,
+ originatorKey [1] OriginatorPublicKey }
+
+ OriginatorPublicKey ::= SEQUENCE {
+ algorithm AlgorithmIdentifier {PUBLIC-KEY, {OriginatorKeySet}},
+ publicKey BIT STRING }
+
+ OriginatorKeySet PUBLIC-KEY ::= { KeyAgreePublicKeys, ... }
+
+ RecipientEncryptedKeys ::= SEQUENCE OF RecipientEncryptedKey
+
+ RecipientEncryptedKey ::= SEQUENCE {
+ rid KeyAgreeRecipientIdentifier,
+ encryptedKey EncryptedKey }
+
+ KeyAgreeRecipientIdentifier ::= CHOICE {
+ issuerAndSerialNumber IssuerAndSerialNumber,
+ rKeyId [0] IMPLICIT RecipientKeyIdentifier }
+
+ RecipientKeyIdentifier ::= SEQUENCE {
+ subjectKeyIdentifier SubjectKeyIdentifier,
+ date GeneralizedTime OPTIONAL,
+ other OtherKeyAttribute OPTIONAL }
+
+ SubjectKeyIdentifier ::= OCTET STRING
+
+ KEKRecipientInfo ::= SEQUENCE {
+ version CMSVersion, -- always set to 4
+ kekid KEKIdentifier,
+ keyEncryptionAlgorithm KeyEncryptionAlgorithmIdentifier,
+ encryptedKey EncryptedKey }
+
+ KEKIdentifier ::= SEQUENCE {
+ keyIdentifier OCTET STRING,
+ date GeneralizedTime OPTIONAL,
+ other OtherKeyAttribute OPTIONAL }
+ PasswordRecipientInfo ::= SEQUENCE {
+ version CMSVersion, -- always set to 0
+ keyDerivationAlgorithm [0] KeyDerivationAlgorithmIdentifier
+ OPTIONAL,
+ keyEncryptionAlgorithm KeyEncryptionAlgorithmIdentifier,
+ encryptedKey EncryptedKey }
+
+ OTHER-RECIPIENT ::= TYPE-IDENTIFIER
+
+ OtherRecipientInfo ::= SEQUENCE {
+ oriType OTHER-RECIPIENT.
+ &id({SupportedOtherRecipInfo}),
+ oriValue OTHER-RECIPIENT.
+ &Type({SupportedOtherRecipInfo}{@oriType})}
+
+ SupportedOtherRecipInfo OTHER-RECIPIENT ::= { ... }
+
+ DigestedData ::= SEQUENCE {
+ version CMSVersion,
+ digestAlgorithm DigestAlgorithmIdentifier,
+ encapContentInfo EncapsulatedContentInfo,
+ digest Digest, ... }
+
+ Digest ::= OCTET STRING
+
+ EncryptedData ::= SEQUENCE {
+ version CMSVersion,
+ encryptedContentInfo EncryptedContentInfo,
+ ...,
+ [[2: unprotectedAttrs [1] IMPLICIT Attributes
+ {{UnprotectedAttributes}} OPTIONAL ]] }
+
+ AuthenticatedData ::= SEQUENCE {
+ version CMSVersion,
+ originatorInfo [0] IMPLICIT OriginatorInfo OPTIONAL,
+ recipientInfos RecipientInfos,
+ macAlgorithm MessageAuthenticationCodeAlgorithm,
+ digestAlgorithm [1] DigestAlgorithmIdentifier OPTIONAL,
+ encapContentInfo EncapsulatedContentInfo,
+ authAttrs [2] IMPLICIT AuthAttributes OPTIONAL,
+ mac MessageAuthenticationCode,
+ unauthAttrs [3] IMPLICIT UnauthAttributes OPTIONAL }
+
+ AuthAttributes ::= SET SIZE (1..MAX) OF Attribute
+ {{AuthAttributeSet}}
+
+ AuthAttributeSet ATTRIBUTE ::= { aa-contentType | aa-messageDigest
+ | aa-signingTime, ...}
+ MessageAuthenticationCode ::= OCTET STRING
+
+ UnauthAttributes ::= SET SIZE (1..MAX) OF Attribute
+ {{UnauthAttributeSet}}
+
+ UnauthAttributeSet ATTRIBUTE ::= {...}
+
+ --
+ -- General algorithm definitions
+ --
+
+ DigestAlgorithmIdentifier ::= AlgorithmIdentifier
+ {DIGEST-ALGORITHM, {DigestAlgorithmSet}}
+
+ DigestAlgorithmSet DIGEST-ALGORITHM ::= {
+ CryptographicMessageSyntaxAlgorithms-2009.MessageDigestAlgs, ... }
+
+ SignatureAlgorithmIdentifier ::= AlgorithmIdentifier
+ {SIGNATURE-ALGORITHM, {SignatureAlgorithmSet}}
+
+ SignatureAlgorithmSet SIGNATURE-ALGORITHM ::=
+ { SignatureAlgs, ... }
+
+ KeyEncryptionAlgorithmIdentifier ::= AlgorithmIdentifier
+ {KEY-WRAP, {KeyEncryptionAlgorithmSet}}
+
+ KeyEncryptionAlgorithmSet KEY-WRAP ::= { KeyWrapAlgs, ... }
+
+ ContentEncryptionAlgorithmIdentifier ::= AlgorithmIdentifier
+ {CONTENT-ENCRYPTION, {ContentEncryptionAlgorithmSet}}
+
+ ContentEncryptionAlgorithmSet CONTENT-ENCRYPTION ::=
+ { ContentEncryptionAlgs, ... }
+
+ MessageAuthenticationCodeAlgorithm ::= AlgorithmIdentifier
+ {MAC-ALGORITHM, {MessageAuthenticationCodeAlgorithmSet}}
+
+ MessageAuthenticationCodeAlgorithmSet MAC-ALGORITHM ::=
+ { MessageAuthAlgs, ... }
+
+ KeyDerivationAlgorithmIdentifier ::= AlgorithmIdentifier
+ {KEY-DERIVATION, {KeyDerivationAlgs, ...}}
+
+ RevocationInfoChoices ::= SET OF RevocationInfoChoice
+
+ RevocationInfoChoice ::= CHOICE {
+ crl CertificateList,
+ ...,
+ [[5: other [1] IMPLICIT OtherRevocationInfoFormat ]] }
+
+ OTHER-REVOK-INFO ::= TYPE-IDENTIFIER
+
+ OtherRevocationInfoFormat ::= SEQUENCE {
+ otherRevInfoFormat OTHER-REVOK-INFO.
+ &id({SupportedOtherRevokInfo}),
+ otherRevInfo OTHER-REVOK-INFO.
+ &Type({SupportedOtherRevokInfo}{@otherRevInfoFormat})}
+
+ SupportedOtherRevokInfo OTHER-REVOK-INFO ::= { ... }
+
+ CertificateChoices ::= CHOICE {
+ certificate Certificate,
+ extendedCertificate [0] IMPLICIT ExtendedCertificate,
+ -- Obsolete
+ ...,
+ [[3: v1AttrCert [1] IMPLICIT AttributeCertificateV1]],
+ -- Obsolete
+ [[4: v2AttrCert [2] IMPLICIT AttributeCertificateV2]],
+ [[5: other [3] IMPLICIT OtherCertificateFormat]] }
+
+ AttributeCertificateV2 ::= AttributeCertificate
+
+ OTHER-CERT-FMT ::= TYPE-IDENTIFIER
+
+ OtherCertificateFormat ::= SEQUENCE {
+ otherCertFormat OTHER-CERT-FMT.
+ &id({SupportedCertFormats}),
+ otherCert OTHER-CERT-FMT.
+ &Type({SupportedCertFormats}{@otherCertFormat})}
+
+ SupportedCertFormats OTHER-CERT-FMT ::= { ... }
+
+ CertificateSet ::= SET OF CertificateChoices
+
+ IssuerAndSerialNumber ::= SEQUENCE {
+ issuer Name,
+ serialNumber CertificateSerialNumber }
+
+ CMSVersion ::= INTEGER { v0(0), v1(1), v2(2), v3(3), v4(4), v5(5) }
+
+ UserKeyingMaterial ::= OCTET STRING
+
+ KEY-ATTRIBUTE ::= TYPE-IDENTIFIER
+
+ OtherKeyAttribute ::= SEQUENCE {
+ keyAttrId KEY-ATTRIBUTE.
+
+ &id({SupportedKeyAttributes}),
+ keyAttr KEY-ATTRIBUTE.
+ &Type({SupportedKeyAttributes}{@keyAttrId})}
+
+ SupportedKeyAttributes KEY-ATTRIBUTE ::= { ... }
+
+ -- Content Type Object Identifiers
+
+ id-ct-contentInfo OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs9(9) smime(16) ct(1) 6 }
+
+ ct-Data CONTENT-TYPE ::= {OCTET STRING IDENTIFIED BY id-data}
+
+ id-data OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs7(7) 1 }
+
+ ct-SignedData CONTENT-TYPE ::=
+ { SignedData IDENTIFIED BY id-signedData}
+
+ id-signedData OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs7(7) 2 }
+
+ ct-EnvelopedData CONTENT-TYPE ::=
+ { EnvelopedData IDENTIFIED BY id-envelopedData}
+
+ id-envelopedData OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs7(7) 3 }
+
+ ct-DigestedData CONTENT-TYPE ::=
+ { DigestedData IDENTIFIED BY id-digestedData}
+
+ id-digestedData OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs7(7) 5 }
+
+ ct-EncryptedData CONTENT-TYPE ::=
+ { EncryptedData IDENTIFIED BY id-encryptedData}
+
+ id-encryptedData OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs7(7) 6 }
+
+ ct-AuthenticatedData CONTENT-TYPE ::=
+ { AuthenticatedData IDENTIFIED BY id-ct-authData}
+
+ id-ct-authData OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs-9(9) smime(16) ct(1) 2 }
+
+ --
+ -- The CMS Attributes
+ --
+
+ MessageDigest ::= OCTET STRING
+
+ SigningTime ::= Time
+
+ Time ::= CHOICE {
+ utcTime UTCTime,
+ generalTime GeneralizedTime }
+
+ Countersignature ::= SignerInfo
+
+ -- Attribute Object Identifiers
+
+ aa-contentType ATTRIBUTE ::=
+ { TYPE ContentType IDENTIFIED BY id-contentType }
+ id-contentType OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs9(9) 3 }
+
+ aa-messageDigest ATTRIBUTE ::=
+ { TYPE MessageDigest IDENTIFIED BY id-messageDigest}
+ id-messageDigest OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs9(9) 4 }
+
+ aa-signingTime ATTRIBUTE ::=
+ { TYPE SigningTime IDENTIFIED BY id-signingTime }
+ id-signingTime OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs9(9) 5 }
+
+ aa-countersignature ATTRIBUTE ::=
+ { TYPE Countersignature IDENTIFIED BY id-countersignature }
+ id-countersignature OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs9(9) 6 }
+
+ --
+ -- Obsolete Extended Certificate syntax from PKCS#6
+ --
+
+ ExtendedCertificateOrCertificate ::= CHOICE {
+ certificate Certificate,
+ extendedCertificate [0] IMPLICIT ExtendedCertificate }
+
+ ExtendedCertificate ::= SEQUENCE {
+ extendedCertificateInfo ExtendedCertificateInfo,
+ signatureAlgorithm SignatureAlgorithmIdentifier,
+ signature Signature }
+
+ ExtendedCertificateInfo ::= SEQUENCE {
+ version CMSVersion,
+ certificate Certificate,
+ attributes UnauthAttributes }
+
+ Signature ::= BIT STRING
+
+ Attribute{ ATTRIBUTE:AttrList } ::= SEQUENCE {
+ attrType ATTRIBUTE.
+ &id({AttrList}),
+ attrValues SET OF ATTRIBUTE.
+ &Type({AttrList}{@attrType}) }
+
+ Attributes { ATTRIBUTE:AttrList } ::=
+ SET SIZE (1..MAX) OF Attribute {{ AttrList }}
+
+ END
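For orientation, the table constraint in ContentInfo above ties contentType to the type of content through ContentSet. A hypothetical value (ci is not defined in the module) carrying plain data would pair id-data with an OCTET STRING:

    -- Illustration only.
    ci ContentInfo ::= { contentType id-data, content OCTET STRING : '0102'H }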
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/CryptographicMessageSyntaxAlgorithms-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/CryptographicMessageSyntaxAlgorithms-2009.asn1
new file mode 100644
index 0000000000..72e8b270db
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/CryptographicMessageSyntaxAlgorithms-2009.asn1
@@ -0,0 +1,248 @@
+ CryptographicMessageSyntaxAlgorithms-2009
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-cmsalg-2001-02(37) }
+ DEFINITIONS IMPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ ParamOptions, DIGEST-ALGORITHM, SIGNATURE-ALGORITHM,
+ PUBLIC-KEY, KEY-DERIVATION, KEY-WRAP, MAC-ALGORITHM,
+ KEY-AGREE, KEY-TRANSPORT, CONTENT-ENCRYPTION, ALGORITHM,
+ AlgorithmIdentifier{}, SMIME-CAPS
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ pk-rsa, pk-dh, pk-dsa, rsaEncryption, DHPublicKey, dhpublicnumber
+ FROM PKIXAlgs-2009
+ {iso(1) identified-organization(3) dod(6)
+ internet(1) security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-algorithms2008-02(56)}
+
+ cap-RC2CBC
+ FROM SecureMimeMessageV3dot1-2009
+ {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-msg-v3dot1-02(39)};
+
+ -- 2. Hash algorithms in this document
+
+ MessageDigestAlgs DIGEST-ALGORITHM ::= {
+ -- mda-md5 | mda-sha1,
+ ... }
+
+ -- 3. Signature algorithms in this document
+
+ SignatureAlgs SIGNATURE-ALGORITHM ::= {
+ -- See RFC 3279
+ -- sa-dsaWithSHA1 | sa-rsaWithMD5 | sa-rsaWithSHA1,
+ ... }
+
+ -- 4. Key Management Algorithms
+ -- 4.1 Key Agreement Algorithms
+
+ KeyAgreementAlgs KEY-AGREE ::= { kaa-esdh | kaa-ssdh, ...}
+ KeyAgreePublicKeys PUBLIC-KEY ::= { pk-dh, ...}
+
+ -- 4.2 Key Transport Algorithms
+
+ KeyTransportAlgs KEY-TRANSPORT ::= { kt-rsa, ... }
+
+ -- 4.3 Symmetric Key-Encryption Key Algorithms
+
+ KeyWrapAlgs KEY-WRAP ::= { kwa-3DESWrap | kwa-RC2Wrap, ... }
+
+ -- 4.4 Key Derivation Algorithms
+
+ KeyDerivationAlgs KEY-DERIVATION ::= { kda-PBKDF2, ... }
+
+ -- 5. Content Encryption Algorithms
+
+ ContentEncryptionAlgs CONTENT-ENCRYPTION ::=
+ { cea-3DES-cbc | cea-RC2-cbc, ... }
+
+ -- 6. Message Authentication Code Algorithms
+
+ MessageAuthAlgs MAC-ALGORITHM ::= { maca-hMAC-SHA1, ... }
+
+ -- S/MIME Capabilities for these items
+
+ SMimeCaps SMIME-CAPS ::= {
+ kaa-esdh.&smimeCaps |
+ kaa-ssdh.&smimeCaps |
+ kt-rsa.&smimeCaps |
+ kwa-3DESWrap.&smimeCaps |
+ kwa-RC2Wrap.&smimeCaps |
+ cea-3DES-cbc.&smimeCaps |
+ cea-RC2-cbc.&smimeCaps |
+ maca-hMAC-SHA1.&smimeCaps,
+ ...}
+
+ --
+ --
+ --
+
+ -- Algorithm Identifiers
+
+ -- rsaEncryption OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ -- us(840) rsadsi(113549) pkcs(1) pkcs-1(1) 1 }
+
+ id-alg-ESDH OBJECT IDENTIFIER ::= { iso(1) member-body(2) us(840)
+ rsadsi(113549) pkcs(1) pkcs-9(9) smime(16) alg(3) 5 }
+
+ id-alg-SSDH OBJECT IDENTIFIER ::= { iso(1) member-body(2) us(840)
+ rsadsi(113549) pkcs(1) pkcs-9(9) smime(16) alg(3) 10 }
+
+ id-alg-CMS3DESwrap OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs-9(9) smime(16) alg(3) 6 }
+
+ id-alg-CMSRC2wrap OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs-9(9) smime(16) alg(3) 7 }
+
+ des-ede3-cbc OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) encryptionAlgorithm(3) 7 }
+
+ rc2-cbc OBJECT IDENTIFIER ::= { iso(1) member-body(2) us(840)
+ rsadsi(113549) encryptionAlgorithm(3) 2 }
+
+ hMAC-SHA1 OBJECT IDENTIFIER ::= { iso(1) identified-organization(3)
+ dod(6) internet(1) security(5) mechanisms(5) 8 1 2 }
+
+ id-PBKDF2 OBJECT IDENTIFIER ::= { iso(1) member-body(2) us(840)
+ rsadsi(113549) pkcs(1) pkcs-5(5) 12 }
+
+ -- Algorithm Identifier Parameter Types
+
+ KeyWrapAlgorithm ::=
+ AlgorithmIdentifier {KEY-WRAP, {KeyWrapAlgs }}
+
+ RC2wrapParameter ::= RC2ParameterVersion
+ RC2ParameterVersion ::= INTEGER
+
+ CBCParameter ::= IV
+
+ IV ::= OCTET STRING -- exactly 8 octets
+
+ RC2CBCParameter ::= SEQUENCE {
+ rc2ParameterVersion INTEGER (1..256),
+ iv OCTET STRING } -- exactly 8 octets
+
+ maca-hMAC-SHA1 MAC-ALGORITHM ::= {
+ IDENTIFIER hMAC-SHA1
+ PARAMS TYPE NULL ARE preferredAbsent
+ IS-KEYED-MAC TRUE
+ SMIME-CAPS {IDENTIFIED BY hMAC-SHA1}
+ }
+
+ PBKDF2-PRFsAlgorithmIdentifier ::= AlgorithmIdentifier{ ALGORITHM,
+ {PBKDF2-PRFs} }
+
+ alg-hMAC-SHA1 ALGORITHM ::=
+ { IDENTIFIER hMAC-SHA1 PARAMS TYPE NULL ARE required }
+
+ PBKDF2-PRFs ALGORITHM ::= { alg-hMAC-SHA1, ... }
+
+ PBKDF2-SaltSources ALGORITHM ::= { ... }
+
+ PBKDF2-SaltSourcesAlgorithmIdentifier ::=
+ AlgorithmIdentifier {ALGORITHM, {PBKDF2-SaltSources}}
+
+ defaultPBKDF2 PBKDF2-PRFsAlgorithmIdentifier ::=
+ { algorithm alg-hMAC-SHA1.&id, parameters NULL:NULL }
+
+ PBKDF2-params ::= SEQUENCE {
+ salt CHOICE {
+ specified OCTET STRING,
+ otherSource PBKDF2-SaltSourcesAlgorithmIdentifier },
+ iterationCount INTEGER (1..MAX),
+ keyLength INTEGER (1..MAX) OPTIONAL,
+ prf PBKDF2-PRFsAlgorithmIdentifier DEFAULT
+ defaultPBKDF2
+ }
+
+ --
+ -- This object is included for completeness. It should not be used
+ -- for encoding of signatures, but was sometimes used in older
+ -- versions of CMS for encoding of RSA signatures.
+ --
+ --
+ -- sa-rsa SIGNATURE-ALGORITHM ::= {
+ -- IDENTIFIER rsaEncryption
+ -- - - value is not ASN.1 encoded
+ -- PARAMS TYPE NULL ARE required
+ -- HASHES {mda-sha1 | mda-md5, ...}
+ -- PUBLIC-KEYS { pk-rsa}
+ -- }
+ --
+ -- No ASN.1 encoding is applied to the signature value
+ -- for these items
+
+ kaa-esdh KEY-AGREE ::= {
+ IDENTIFIER id-alg-ESDH
+ PARAMS TYPE KeyWrapAlgorithm ARE required
+ PUBLIC-KEYS { pk-dh }
+ -- UKM is not ASN.1 encoded
+ UKM ARE optional
+ SMIME-CAPS {TYPE KeyWrapAlgorithm IDENTIFIED BY id-alg-ESDH}
+ }
+
+ kaa-ssdh KEY-AGREE ::= {
+ IDENTIFIER id-alg-SSDH
+ PARAMS TYPE KeyWrapAlgorithm ARE required
+ PUBLIC-KEYS {pk-dh}
+ -- UKM is not ASN.1 encoded
+ UKM ARE optional
+ SMIME-CAPS {TYPE KeyWrapAlgorithm IDENTIFIED BY id-alg-SSDH}
+ }
+
+ dh-public-number OBJECT IDENTIFIER ::= dhpublicnumber
+
+ pk-originator-dh PUBLIC-KEY ::= {
+ IDENTIFIER dh-public-number
+ KEY DHPublicKey
+ PARAMS ARE absent
+ CERT-KEY-USAGE {keyAgreement, encipherOnly, decipherOnly}
+ }
+
+ kwa-3DESWrap KEY-WRAP ::= {
+ IDENTIFIER id-alg-CMS3DESwrap
+ PARAMS TYPE NULL ARE required
+ SMIME-CAPS {IDENTIFIED BY id-alg-CMS3DESwrap}
+ }
+
+ kwa-RC2Wrap KEY-WRAP ::= {
+ IDENTIFIER id-alg-CMSRC2wrap
+ PARAMS TYPE RC2wrapParameter ARE required
+ SMIME-CAPS { IDENTIFIED BY id-alg-CMSRC2wrap }
+ }
+
+ kda-PBKDF2 KEY-DERIVATION ::= {
+ IDENTIFIER id-PBKDF2
+ PARAMS TYPE PBKDF2-params ARE required
+ -- No S/MIME caps defined
+ }
+
+ cea-3DES-cbc CONTENT-ENCRYPTION ::= {
+ IDENTIFIER des-ede3-cbc
+ PARAMS TYPE IV ARE required
+ SMIME-CAPS { IDENTIFIED BY des-ede3-cbc }
+ }
+
+ cea-RC2-cbc CONTENT-ENCRYPTION ::= {
+ IDENTIFIER rc2-cbc
+ PARAMS TYPE RC2CBCParameter ARE required
+ SMIME-CAPS cap-RC2CBC
+ }
+
+ kt-rsa KEY-TRANSPORT ::= {
+ IDENTIFIER rsaEncryption
+ PARAMS TYPE NULL ARE required
+ PUBLIC-KEYS { pk-rsa }
+ SMIME-CAPS {IDENTIFIED BY rsaEncryption}
+ }
+
+ -- S/MIME Capabilities - most have no label.
+
+ cap-3DESwrap SMIME-CAPS ::= { IDENTIFIED BY id-alg-CMS3DESwrap }
+
+ END
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DOR-definition.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DOR-definition.asn1
index cd3330dc56..cd3330dc56 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/DOR-definition.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DOR-definition.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DSAOperationalAttributeTypes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DSAOperationalAttributeTypes.asn1
index df5e8489ea..df5e8489ea 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/DSAOperationalAttributeTypes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DSAOperationalAttributeTypes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Default-Value-Lists.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Default-Value-Lists.asn1
index ef1187ba8c..ef1187ba8c 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Default-Value-Lists.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Default-Value-Lists.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryAbstractService.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryAbstractService.asn1
index 5a5d310729..5a5d310729 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryAbstractService.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryAbstractService.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryAccessProtocol.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryAccessProtocol.asn1
index 10d6979f6d..10d6979f6d 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryAccessProtocol.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryAccessProtocol.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryInformationShadowProtocol.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryInformationShadowProtocol.asn1
index 91c0a865f7..91c0a865f7 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryInformationShadowProtocol.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryInformationShadowProtocol.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryOperationalBindingManagementProtocol.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryOperationalBindingManagementProtocol.asn1
index e3e1f95621..e3e1f95621 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryOperationalBindingManagementProtocol.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryOperationalBindingManagementProtocol.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryOperationalBindingTypes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryOperationalBindingTypes.asn1
index 9df5d2783a..9df5d2783a 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryOperationalBindingTypes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryOperationalBindingTypes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryProtectionMappings.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryProtectionMappings.asn1
index 37c6cac261..37c6cac261 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryProtectionMappings.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryProtectionMappings.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryShadowAbstractService.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryShadowAbstractService.asn1
index acbb692b6f..acbb692b6f 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryShadowAbstractService.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryShadowAbstractService.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DirectorySystemProtocol.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectorySystemProtocol.asn1
index cace79d109..cace79d109 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/DirectorySystemProtocol.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectorySystemProtocol.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DistributedOperations.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DistributedOperations.asn1
index 72e791f10c..72e791f10c 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/DistributedOperations.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DistributedOperations.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Document-Profile-Descriptor.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Document-Profile-Descriptor.asn1
index d8c15b7afa..d8c15b7afa 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Document-Profile-Descriptor.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Document-Profile-Descriptor.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/EnhancedSecurity.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/EnhancedSecurity.asn1
index 9991a59454..9991a59454 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/EnhancedSecurity.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/EnhancedSecurity.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/EnrollmentMessageSyntax-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/EnrollmentMessageSyntax-2009.asn1
new file mode 100644
index 0000000000..17a45a0a6b
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/EnrollmentMessageSyntax-2009.asn1
@@ -0,0 +1,543 @@
+ EnrollmentMessageSyntax-2009
+ {iso(1) identified-organization(3) dod(6) internet(1)
+ security(5) mechanisms(5) pkix(7) id-mod(0) id-mod-cmc2002-02(53)}
+ DEFINITIONS IMPLICIT TAGS ::=
+ BEGIN
+ EXPORTS ALL;
+ IMPORTS
+
+ AttributeSet{}, Extension{}, EXTENSION, ATTRIBUTE
+ FROM PKIX-CommonTypes-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57)}
+ AlgorithmIdentifier{}, DIGEST-ALGORITHM, KEY-WRAP, KEY-DERIVATION,
+ MAC-ALGORITHM, SIGNATURE-ALGORITHM, PUBLIC-KEY
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ CertificateSerialNumber, GeneralName, CRLReason, ReasonFlags,
+ CertExtensions
+ FROM PKIX1Implicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-implicit-02(59)}
+
+ Name, id-pkix, PublicKeyAlgorithms, SignatureAlgorithms
+ FROM PKIX1Explicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-explicit-02(51)}
+
+ ContentInfo, IssuerAndSerialNumber, CONTENT-TYPE
+ FROM CryptographicMessageSyntax-2009
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-cms-2004-02(41)}
+
+ CertReqMsg, PKIPublicationInfo, CertTemplate
+ FROM PKIXCRMF-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-crmf2005-02(55)}
+
+ mda-sha1
+ FROM PKIXAlgs-2009
+ { iso(1) identified-organization(3) dod(6)
+ internet(1) security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-algorithms2008-02(56)}
+
+ kda-PBKDF2, maca-hMAC-SHA1
+ FROM CryptographicMessageSyntaxAlgorithms-2009
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-cmsalg-2001-02(37) }
+
+ mda-sha256
+ FROM PKIX1-PSS-OAEP-Algorithms-2009
+ { iso(1) identified-organization(3) dod(6)
+ internet(1) security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-rsa-pkalgs-02(54) } ;
+
+ -- CMS Content types defined in this document
+ CMC-ContentTypes CONTENT-TYPE ::= { ct-PKIData | ct-PKIResponse, ... }
+
+ -- Signature Algorithms defined in this document
+
+ SignatureAlgs SIGNATURE-ALGORITHM ::= { sa-noSignature }
+
+ -- CMS Unsigned Attributes
+
+ CMC-UnsignedAtts ATTRIBUTE ::= { aa-cmc-unsignedData }
+
+ --
+ --
+
+ id-cmc OBJECT IDENTIFIER ::= {id-pkix 7} -- CMC controls
+ id-cct OBJECT IDENTIFIER ::= {id-pkix 12} -- CMC content types
+
+ -- This is the content type for a request message in the protocol
+
+ ct-PKIData CONTENT-TYPE ::=
+ { PKIData IDENTIFIED BY id-cct-PKIData }
+ id-cct-PKIData OBJECT IDENTIFIER ::= { id-cct 2 }
+
+ PKIData ::= SEQUENCE {
+ controlSequence SEQUENCE SIZE(0..MAX) OF TaggedAttribute,
+ reqSequence SEQUENCE SIZE(0..MAX) OF TaggedRequest,
+ cmsSequence SEQUENCE SIZE(0..MAX) OF TaggedContentInfo,
+ otherMsgSequence SEQUENCE SIZE(0..MAX) OF OtherMsg
+ }
+
+ BodyPartID ::= INTEGER(0..4294967295)
+
+ TaggedAttribute ::= SEQUENCE {
+ bodyPartID BodyPartID,
+ attrType CMC-CONTROL.&id({Cmc-Control-Set}),
+ attrValues SET OF CMC-CONTROL.
+ &Type({Cmc-Control-Set}{@attrType})
+ }
+
+ Cmc-Control-Set CMC-CONTROL ::= {
+ cmc-identityProof | cmc-dataReturn | cmc-regInfo |
+ cmc-responseInfo | cmc-queryPending | cmc-popLinkRandom |
+ cmc-popLinkWitness | cmc-identification | cmc-transactionId |
+ cmc-senderNonce | cmc-recipientNonce | cmc-statusInfo |
+ cmc-addExtensions | cmc-encryptedPOP | cmc-decryptedPOP |
+ cmc-lraPOPWitness | cmc-getCert | cmc-getCRL |
+ cmc-revokeRequest | cmc-confirmCertAcceptance |
+ cmc-statusInfoV2 | cmc-trustedAnchors | cmc-authData |
+ cmc-batchRequests | cmc-batchResponses | cmc-publishCert |
+ cmc-modCertTemplate | cmc-controlProcessed |
+ cmc-identityProofV2 | cmc-popLinkWitnessV2, ... }
+
+ OTHER-REQUEST ::= TYPE-IDENTIFIER
+
+ -- We do not define any other requests in this document;
+ -- examples might be attribute certification requests
+
+ OtherRequests OTHER-REQUEST ::= {...}
+
+ TaggedRequest ::= CHOICE {
+ tcr [0] TaggedCertificationRequest,
+ crm [1] CertReqMsg,
+ orm [2] SEQUENCE {
+ bodyPartID BodyPartID,
+ requestMessageType OTHER-REQUEST.&id({OtherRequests}),
+ requestMessageValue OTHER-REQUEST.&Type({OtherRequests}
+ {@.requestMessageType})
+ }
+ }
+
+ TaggedCertificationRequest ::= SEQUENCE {
+ bodyPartID BodyPartID,
+ certificationRequest CertificationRequest
+ }
+
+ AttributeList ATTRIBUTE ::= {at-extension-req, ...}
+
+ CertificationRequest ::= SEQUENCE {
+ certificationRequestInfo SEQUENCE {
+ version INTEGER,
+ subject Name,
+ subjectPublicKeyInfo SEQUENCE {
+ algorithm AlgorithmIdentifier{PUBLIC-KEY,
+ {PublicKeyAlgorithms}},
+ subjectPublicKey BIT STRING
+ },
+ attributes [0] IMPLICIT SET OF
+ AttributeSet{{AttributeList}}
+ },
+ signatureAlgorithm AlgorithmIdentifier
+ {SIGNATURE-ALGORITHM,
+ {SignatureAlgorithms}},
+ signature BIT STRING
+ }
+
+ TaggedContentInfo ::= SEQUENCE {
+ bodyPartID BodyPartID,
+ contentInfo ContentInfo
+ }
+
+ OTHER-MSG ::= TYPE-IDENTIFIER
+
+ -- No other messages currently defined
+
+ OtherMsgSet OTHER-MSG ::= {...}
+
+ OtherMsg ::= SEQUENCE {
+ bodyPartID BodyPartID,
+ otherMsgType OTHER-MSG.&id({OtherMsgSet}),
+ otherMsgValue OTHER-MSG.&Type({OtherMsgSet}{@otherMsgType}) }
+
+ -- This defines the response message in the protocol
+
+ ct-PKIResponse CONTENT-TYPE ::=
+ { PKIResponse IDENTIFIED BY id-cct-PKIResponse }
+ id-cct-PKIResponse OBJECT IDENTIFIER ::= { id-cct 3 }
+
+ ResponseBody ::= PKIResponse
+
+ PKIResponse ::= SEQUENCE {
+ controlSequence SEQUENCE SIZE(0..MAX) OF TaggedAttribute,
+ cmsSequence SEQUENCE SIZE(0..MAX) OF TaggedContentInfo,
+ otherMsgSequence SEQUENCE SIZE(0..MAX) OF OtherMsg
+ }
+
+ CMC-CONTROL ::= TYPE-IDENTIFIER
+
+ -- The following controls have the type OCTET STRING
+
+ cmc-identityProof CMC-CONTROL ::=
+ { OCTET STRING IDENTIFIED BY id-cmc-identityProof }
+ id-cmc-identityProof OBJECT IDENTIFIER ::= {id-cmc 3}
+
+ cmc-dataReturn CMC-CONTROL ::=
+ { OCTET STRING IDENTIFIED BY id-cmc-dataReturn }
+ id-cmc-dataReturn OBJECT IDENTIFIER ::= {id-cmc 4}
+
+ cmc-regInfo CMC-CONTROL ::=
+ { OCTET STRING IDENTIFIED BY id-cmc-regInfo }
+ id-cmc-regInfo OBJECT IDENTIFIER ::= {id-cmc 18}
+
+ cmc-responseInfo CMC-CONTROL ::=
+ { OCTET STRING IDENTIFIED BY id-cmc-responseInfo }
+ id-cmc-responseInfo OBJECT IDENTIFIER ::= {id-cmc 19}
+
+ cmc-queryPending CMC-CONTROL ::=
+ { OCTET STRING IDENTIFIED BY id-cmc-queryPending }
+ id-cmc-queryPending OBJECT IDENTIFIER ::= {id-cmc 21}
+
+ cmc-popLinkRandom CMC-CONTROL ::=
+ { OCTET STRING IDENTIFIED BY id-cmc-popLinkRandom }
+ id-cmc-popLinkRandom OBJECT IDENTIFIER ::= {id-cmc 22}
+
+ cmc-popLinkWitness CMC-CONTROL ::=
+ { OCTET STRING IDENTIFIED BY id-cmc-popLinkWitness }
+ id-cmc-popLinkWitness OBJECT IDENTIFIER ::= {id-cmc 23}
+
+ -- The following controls have the type UTF8String
+
+ cmc-identification CMC-CONTROL ::=
+ { UTF8String IDENTIFIED BY id-cmc-identification }
+ id-cmc-identification OBJECT IDENTIFIER ::= {id-cmc 2}
+
+ -- The following controls have the type INTEGER
+
+ cmc-transactionId CMC-CONTROL ::=
+ { INTEGER IDENTIFIED BY id-cmc-transactionId }
+ id-cmc-transactionId OBJECT IDENTIFIER ::= {id-cmc 5}
+
+ -- The following controls have the type OCTET STRING
+
+ cmc-senderNonce CMC-CONTROL ::=
+ { OCTET STRING IDENTIFIED BY id-cmc-senderNonce }
+
+ id-cmc-senderNonce OBJECT IDENTIFIER ::= {id-cmc 6}
+
+ cmc-recipientNonce CMC-CONTROL ::=
+ { OCTET STRING IDENTIFIED BY id-cmc-recipientNonce }
+ id-cmc-recipientNonce OBJECT IDENTIFIER ::= {id-cmc 7}
+
+ -- Used to return status in a response
+
+ cmc-statusInfo CMC-CONTROL ::=
+ { CMCStatusInfo IDENTIFIED BY id-cmc-statusInfo }
+ id-cmc-statusInfo OBJECT IDENTIFIER ::= {id-cmc 1}
+
+ CMCStatusInfo ::= SEQUENCE {
+ cMCStatus CMCStatus,
+ bodyList SEQUENCE SIZE (1..MAX) OF BodyPartID,
+ statusString UTF8String OPTIONAL,
+ otherInfo CHOICE {
+ failInfo CMCFailInfo,
+ pendInfo PendInfo
+ } OPTIONAL
+ }
+
+ PendInfo ::= SEQUENCE {
+ pendToken OCTET STRING,
+ pendTime GeneralizedTime
+ }
+
+ CMCStatus ::= INTEGER {
+ success (0),
+ failed (2),
+ pending (3),
+ noSupport (4),
+ confirmRequired (5),
+ popRequired (6),
+ partial (7)
+ }
+
+ -- Note:
+ -- The spelling of unsupportedExt is corrected in this version.
+ -- In RFC 2797, it was unsuportedExt.
+
+ CMCFailInfo ::= INTEGER {
+ badAlg (0),
+ badMessageCheck (1),
+ badRequest (2),
+ badTime (3),
+ badCertId (4),
+       unsupportedExt      (5),
+ mustArchiveKeys (6),
+ badIdentity (7),
+ popRequired (8),
+ popFailed (9),
+ noKeyReuse (10),
+ internalCAError (11),
+ tryLater (12),
+ authDataFail (13)
+ }
+
+ -- Used for RAs to add extensions to certification requests
+
+ cmc-addExtensions CMC-CONTROL ::=
+ { AddExtensions IDENTIFIED BY id-cmc-addExtensions }
+ id-cmc-addExtensions OBJECT IDENTIFIER ::= {id-cmc 8}
+
+ AddExtensions ::= SEQUENCE {
+ pkiDataReference BodyPartID,
+ certReferences SEQUENCE OF BodyPartID,
+ extensions SEQUENCE OF Extension{{CertExtensions}}
+ }
+
+ cmc-encryptedPOP CMC-CONTROL ::=
+ { EncryptedPOP IDENTIFIED BY id-cmc-encryptedPOP }
+ cmc-decryptedPOP CMC-CONTROL ::=
+ { DecryptedPOP IDENTIFIED BY id-cmc-decryptedPOP }
+ id-cmc-encryptedPOP OBJECT IDENTIFIER ::= {id-cmc 9}
+ id-cmc-decryptedPOP OBJECT IDENTIFIER ::= {id-cmc 10}
+
+ EncryptedPOP ::= SEQUENCE {
+ request TaggedRequest,
+ cms ContentInfo,
+ thePOPAlgID AlgorithmIdentifier{MAC-ALGORITHM, {POPAlgs}},
+ witnessAlgID AlgorithmIdentifier{DIGEST-ALGORITHM,
+ {WitnessAlgs}},
+ witness OCTET STRING
+ }
+
+ POPAlgs MAC-ALGORITHM ::= {maca-hMAC-SHA1, ...}
+ WitnessAlgs DIGEST-ALGORITHM ::= {mda-sha1, ...}
+
+ DecryptedPOP ::= SEQUENCE {
+ bodyPartID BodyPartID,
+ thePOPAlgID AlgorithmIdentifier{MAC-ALGORITHM, {POPAlgs}},
+ thePOP OCTET STRING
+ }
+
+ cmc-lraPOPWitness CMC-CONTROL ::=
+ { LraPopWitness IDENTIFIED BY id-cmc-lraPOPWitness }
+
+ id-cmc-lraPOPWitness OBJECT IDENTIFIER ::= {id-cmc 11}
+
+ LraPopWitness ::= SEQUENCE {
+ pkiDataBodyid BodyPartID,
+ bodyIds SEQUENCE OF BodyPartID
+ }
+
+ --
+
+ cmc-getCert CMC-CONTROL ::=
+ { GetCert IDENTIFIED BY id-cmc-getCert }
+ id-cmc-getCert OBJECT IDENTIFIER ::= {id-cmc 15}
+
+ GetCert ::= SEQUENCE {
+ issuerName GeneralName,
+ serialNumber INTEGER }
+
+ cmc-getCRL CMC-CONTROL ::=
+ { GetCRL IDENTIFIED BY id-cmc-getCRL }
+ id-cmc-getCRL OBJECT IDENTIFIER ::= {id-cmc 16}
+ GetCRL ::= SEQUENCE {
+ issuerName Name,
+ cRLName GeneralName OPTIONAL,
+ time GeneralizedTime OPTIONAL,
+ reasons ReasonFlags OPTIONAL }
+
+ cmc-revokeRequest CMC-CONTROL ::=
+ { RevokeRequest IDENTIFIED BY id-cmc-revokeRequest}
+ id-cmc-revokeRequest OBJECT IDENTIFIER ::= {id-cmc 17}
+
+ RevokeRequest ::= SEQUENCE {
+ issuerName Name,
+ serialNumber INTEGER,
+ reason CRLReason,
+ invalidityDate GeneralizedTime OPTIONAL,
+ passphrase OCTET STRING OPTIONAL,
+ comment UTF8String OPTIONAL }
+
+ cmc-confirmCertAcceptance CMC-CONTROL ::=
+ { CMCCertId IDENTIFIED BY id-cmc-confirmCertAcceptance }
+ id-cmc-confirmCertAcceptance OBJECT IDENTIFIER ::= {id-cmc 24}
+
+ CMCCertId ::= IssuerAndSerialNumber
+
+ -- The following is used to request v3 extensions be added
+ -- to a certificate
+
+ at-extension-req ATTRIBUTE ::=
+ { TYPE ExtensionReq IDENTIFIED BY id-ExtensionReq }
+ id-ExtensionReq OBJECT IDENTIFIER ::= {iso(1) member-body(2) us(840)
+ rsadsi(113549) pkcs(1) pkcs-9(9) 14}
+
+ ExtensionReq ::= SEQUENCE SIZE (1..MAX) OF
+ Extension{{CertExtensions}}
+
+ -- The following allows Diffie-Hellman Certification Request
+ -- Messages to be well-formed
+
+ sa-noSignature SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER id-alg-noSignature
+ VALUE NoSignatureValue
+ PARAMS TYPE NULL ARE required
+ HASHES { mda-sha1 }
+ }
+ id-alg-noSignature OBJECT IDENTIFIER ::= {id-pkix id-alg(6) 2}
+
+ NoSignatureValue ::= OCTET STRING
+ -- Unauthenticated attribute to carry removable data.
+
+ id-aa OBJECT IDENTIFIER ::= { iso(1) member-body(2) us(840)
+ rsadsi(113549) pkcs(1) pkcs-9(9) smime(16) id-aa(2)}
+
+ aa-cmc-unsignedData ATTRIBUTE ::=
+ { TYPE CMCUnsignedData IDENTIFIED BY id-aa-cmc-unsignedData }
+ id-aa-cmc-unsignedData OBJECT IDENTIFIER ::= {id-aa 34}
+
+ CMCUnsignedData ::= SEQUENCE {
+ bodyPartPath BodyPartPath,
+ identifier TYPE-IDENTIFIER.&id,
+ content TYPE-IDENTIFIER.&Type
+ }
+
+ -- Replaces CMC Status Info
+ --
+
+ cmc-statusInfoV2 CMC-CONTROL ::=
+ { CMCStatusInfoV2 IDENTIFIED BY id-cmc-statusInfoV2 }
+ id-cmc-statusInfoV2 OBJECT IDENTIFIER ::= {id-cmc 25}
+
+ EXTENDED-FAILURE-INFO ::= TYPE-IDENTIFIER
+
+ ExtendedFailures EXTENDED-FAILURE-INFO ::= {...}
+
+ CMCStatusInfoV2 ::= SEQUENCE {
+ cMCStatus CMCStatus,
+ bodyList SEQUENCE SIZE (1..MAX) OF
+ BodyPartReference,
+ statusString UTF8String OPTIONAL,
+ otherInfo CHOICE {
+ failInfo CMCFailInfo,
+ pendInfo PendInfo,
+ extendedFailInfo [1] SEQUENCE {
+ failInfoOID TYPE-IDENTIFIER.&id
+ ({ExtendedFailures}),
+ failInfoValue TYPE-IDENTIFIER.&Type
+ ({ExtendedFailures}
+ {@.failInfoOID})
+ }
+ } OPTIONAL
+ }
+
+ BodyPartReference ::= CHOICE {
+ bodyPartID BodyPartID,
+ bodyPartPath BodyPartPath
+ }
+
+ BodyPartPath ::= SEQUENCE SIZE (1..MAX) OF BodyPartID
+
+ -- Allow for distribution of trust anchors
+ --
+
+ cmc-trustedAnchors CMC-CONTROL ::=
+ { PublishTrustAnchors IDENTIFIED BY id-cmc-trustedAnchors }
+ id-cmc-trustedAnchors OBJECT IDENTIFIER ::= {id-cmc 26}
+
+ PublishTrustAnchors ::= SEQUENCE {
+ seqNumber INTEGER,
+ hashAlgorithm AlgorithmIdentifier{DIGEST-ALGORITHM,
+ {HashAlgorithms}},
+ anchorHashes SEQUENCE OF OCTET STRING
+ }
+
+ HashAlgorithms DIGEST-ALGORITHM ::= {
+ mda-sha1 | mda-sha256, ...
+ }
+
+ cmc-authData CMC-CONTROL ::=
+ { AuthPublish IDENTIFIED BY id-cmc-authData }
+ id-cmc-authData OBJECT IDENTIFIER ::= {id-cmc 27}
+
+ AuthPublish ::= BodyPartID
+
+ -- These two items use BodyPartList
+
+ cmc-batchRequests CMC-CONTROL ::=
+ { BodyPartList IDENTIFIED BY id-cmc-batchRequests }
+ id-cmc-batchRequests OBJECT IDENTIFIER ::= {id-cmc 28}
+
+ cmc-batchResponses CMC-CONTROL ::=
+ { BodyPartList IDENTIFIED BY id-cmc-batchResponses }
+ id-cmc-batchResponses OBJECT IDENTIFIER ::= {id-cmc 29}
+
+ BodyPartList ::= SEQUENCE SIZE (1..MAX) OF BodyPartID
+
+ cmc-publishCert CMC-CONTROL ::=
+ { CMCPublicationInfo IDENTIFIED BY id-cmc-publishCert }
+ id-cmc-publishCert OBJECT IDENTIFIER ::= {id-cmc 30}
+
+ CMCPublicationInfo ::= SEQUENCE {
+ hashAlg AlgorithmIdentifier{DIGEST-ALGORITHM,
+ {HashAlgorithms}},
+ certHashes SEQUENCE OF OCTET STRING,
+ pubInfo PKIPublicationInfo
+ }
+
+ cmc-modCertTemplate CMC-CONTROL ::=
+ { ModCertTemplate IDENTIFIED BY id-cmc-modCertTemplate }
+ id-cmc-modCertTemplate OBJECT IDENTIFIER ::= {id-cmc 31}
+
+ ModCertTemplate ::= SEQUENCE {
+ pkiDataReference BodyPartPath,
+ certReferences BodyPartList,
+ replace BOOLEAN DEFAULT TRUE,
+ certTemplate CertTemplate
+ }
+
+ -- Inform follow-on servers that one or more controls have
+ -- already been processed
+
+ cmc-controlProcessed CMC-CONTROL ::=
+ { ControlsProcessed IDENTIFIED BY id-cmc-controlProcessed }
+ id-cmc-controlProcessed OBJECT IDENTIFIER ::= {id-cmc 32}
+
+ ControlsProcessed ::= SEQUENCE {
+ bodyList SEQUENCE SIZE(1..MAX) OF BodyPartReference
+ }
+
+ -- Identity Proof control w/ algorithm agility
+
+ cmc-identityProofV2 CMC-CONTROL ::=
+ { IdentityProofV2 IDENTIFIED BY id-cmc-identityProofV2 }
+ id-cmc-identityProofV2 OBJECT IDENTIFIER ::= { id-cmc 33 }
+
+ IdentityProofV2 ::= SEQUENCE {
+ proofAlgID AlgorithmIdentifier{DIGEST-ALGORITHM,
+ {WitnessAlgs}},
+ macAlgId AlgorithmIdentifier{MAC-ALGORITHM, {POPAlgs}},
+ witness OCTET STRING
+ }
+
+ cmc-popLinkWitnessV2 CMC-CONTROL ::=
+ { PopLinkWitnessV2 IDENTIFIED BY id-cmc-popLinkWitnessV2 }
+ id-cmc-popLinkWitnessV2 OBJECT IDENTIFIER ::= { id-cmc 34 }
+
+ PopLinkWitnessV2 ::= SEQUENCE {
+ keyGenAlgorithm AlgorithmIdentifier{KEY-DERIVATION,
+ {KeyDevAlgs}},
+ macAlgorithm AlgorithmIdentifier{MAC-ALGORITHM, {POPAlgs}},
+ witness OCTET STRING
+ }
+
+ KeyDevAlgs KEY-DERIVATION ::= {kda-PBKDF2, ...}
+
+ END
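The module ending above imports from several other PKIX and CMS modules, so it cannot be compiled in isolation. As a rough sketch (not part of this patch), interdependent modules like these can be handed to the Erlang asn1 compiler as one unit through a module-set file; the set-file name, its contents, and the output directory below are placeholders, not names taken from the test suite:

    %% Sketch only: "rfcs.set.asn" is a hypothetical set file that lists
    %% the ASN.1 source files of the interdependent modules, one file
    %% name per line. asn1ct then compiles the listed modules together,
    %% which lets the IMPORTS between them be resolved.
    compile_module_set() ->
        ok = asn1ct:compile("rfcs.set.asn", [ber, {outdir, "asn1_gen"}]).
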
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/External-References.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/External-References.asn1
index 9a7d4936a6..9a7d4936a6 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/External-References.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/External-References.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/GULSProtectionMappings.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/GULSProtectionMappings.asn1
index 9b6a426ca2..9b6a426ca2 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/GULSProtectionMappings.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/GULSProtectionMappings.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/GenericProtectingTransferSyntax.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/GenericProtectingTransferSyntax.asn1
index c59451dcdb..c59451dcdb 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/GenericProtectingTransferSyntax.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/GenericProtectingTransferSyntax.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Geo-Gr-Coding-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Geo-Gr-Coding-Attributes.asn1
index 60acbb3b5c..60acbb3b5c 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Geo-Gr-Coding-Attributes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Geo-Gr-Coding-Attributes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Geo-Gr-Presentation-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Geo-Gr-Presentation-Attributes.asn1
index 84c1ee9851..84c1ee9851 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Geo-Gr-Presentation-Attributes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Geo-Gr-Presentation-Attributes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Geo-Gr-Profile-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Geo-Gr-Profile-Attributes.asn1
index 28daa467e1..28daa467e1 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Geo-Gr-Profile-Attributes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Geo-Gr-Profile-Attributes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/GulsSecurityExchanges.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/GulsSecurityExchanges.asn1
index 336b824174..336b824174 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/GulsSecurityExchanges.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/GulsSecurityExchanges.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/GulsSecurityTransformations.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/GulsSecurityTransformations.asn1
index db2725c37d..db2725c37d 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/GulsSecurityTransformations.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/GulsSecurityTransformations.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/HierarchicalOperationalBindings.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/HierarchicalOperationalBindings.asn1
index 4e0084b079..4e0084b079 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/HierarchicalOperationalBindings.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/HierarchicalOperationalBindings.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSAbstractService.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSAbstractService.asn1
index 3fec8ae64a..3fec8ae64a 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSAbstractService.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSAbstractService.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSAutoActionTypes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSAutoActionTypes.asn1
index 8c0c8138e2..8c0c8138e2 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSAutoActionTypes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSAutoActionTypes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSExtendedBodyPartTypes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSExtendedBodyPartTypes.asn1
index 9805a6189d..9805a6189d 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSExtendedBodyPartTypes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSExtendedBodyPartTypes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSExtendedBodyPartTypes2.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSExtendedBodyPartTypes2.asn1
index b39e03c3b6..b39e03c3b6 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSExtendedBodyPartTypes2.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSExtendedBodyPartTypes2.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSExtendedVoiceBodyPartType.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSExtendedVoiceBodyPartType.asn1
index 171f4b4223..171f4b4223 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSExtendedVoiceBodyPartType.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSExtendedVoiceBodyPartType.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSFileTransferBodyPartType.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSFileTransferBodyPartType.asn1
index 59de6d1b04..59de6d1b04 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSFileTransferBodyPartType.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSFileTransferBodyPartType.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSForwardedContentBodyPartType.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSForwardedContentBodyPartType.asn1
index 57faac6587..57faac6587 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSForwardedContentBodyPartType.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSForwardedContentBodyPartType.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSForwardedReportBodyPartType.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSForwardedReportBodyPartType.asn1
index 4e46c7679b..4e46c7679b 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSForwardedReportBodyPartType.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSForwardedReportBodyPartType.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSFunctionalObjects.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSFunctionalObjects.asn1
index 09ef4de282..09ef4de282 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSFunctionalObjects.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSFunctionalObjects.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSHeadingExtensions.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSHeadingExtensions.asn1
index 752e8d05e1..752e8d05e1 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSHeadingExtensions.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSHeadingExtensions.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSInformationObjects.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSInformationObjects.asn1
index 3fb0463ee7..3fb0463ee7 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSInformationObjects.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSInformationObjects.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSMessageStoreAttributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSMessageStoreAttributes.asn1
index 719bca4987..719bca4987 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSMessageStoreAttributes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSMessageStoreAttributes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSObjectIdentifiers.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSObjectIdentifiers.asn1
index 6e5c01ab40..6e5c01ab40 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSObjectIdentifiers.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSObjectIdentifiers.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSObjectIdentifiers2.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSObjectIdentifiers2.asn1
index 2b46b27b3e..2b46b27b3e 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSObjectIdentifiers2.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSObjectIdentifiers2.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSSecurityExtensions.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSSecurityExtensions.asn1
index 8c692ccb31..8c692ccb31 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSSecurityExtensions.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSSecurityExtensions.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSUpperBounds.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSUpperBounds.asn1
index 27324f614f..27324f614f 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSUpperBounds.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSUpperBounds.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/ISO-STANDARD-9541-FONT-ATTRIBUTE-SET.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/ISO-STANDARD-9541-FONT-ATTRIBUTE-SET.asn1
index b7efd7417e..b7efd7417e 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/ISO-STANDARD-9541-FONT-ATTRIBUTE-SET.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/ISO-STANDARD-9541-FONT-ATTRIBUTE-SET.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/ISO8571-FTAM.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/ISO8571-FTAM.asn1
index a57a276704..a57a276704 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/ISO8571-FTAM.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/ISO8571-FTAM.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/ISO9541-SN.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/ISO9541-SN.asn1
index 0149602040..0149602040 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/ISO9541-SN.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/ISO9541-SN.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Identifiers-and-Expressions.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Identifiers-and-Expressions.asn1
index bd1d8d3c48..bd1d8d3c48 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Identifiers-and-Expressions.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Identifiers-and-Expressions.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/InformationFramework.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/InformationFramework.asn1
index 813ac9c6a0..813ac9c6a0 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/InformationFramework.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/InformationFramework.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Interchange-Data-Elements.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Interchange-Data-Elements.asn1
index 2c78360b7b..2c78360b7b 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Interchange-Data-Elements.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Interchange-Data-Elements.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Layout-Descriptors.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Layout-Descriptors.asn1
index 92c887bb06..92c887bb06 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Layout-Descriptors.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Layout-Descriptors.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Link-Descriptors.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Link-Descriptors.asn1
index 64fc4436e4..64fc4436e4 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Link-Descriptors.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Link-Descriptors.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Location-Expressions.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Location-Expressions.asn1
index 5de6491621..5de6491621 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Location-Expressions.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Location-Expressions.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Logical-Descriptors.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Logical-Descriptors.asn1
index fab36bf12a..fab36bf12a 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Logical-Descriptors.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Logical-Descriptors.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MHSObjectIdentifiers.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MHSObjectIdentifiers.asn1
index 187c3c8ad4..187c3c8ad4 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MHSObjectIdentifiers.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MHSObjectIdentifiers.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MHSProtocolObjectIdentifiers.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MHSProtocolObjectIdentifiers.asn1
index 40f53b9458..40f53b9458 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MHSProtocolObjectIdentifiers.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MHSProtocolObjectIdentifiers.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MSAbstractService.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MSAbstractService.asn1
index 052b3b2041..052b3b2041 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MSAbstractService.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MSAbstractService.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MSAccessProtocol.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MSAccessProtocol.asn1
index b69d72b3ed..b69d72b3ed 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MSAccessProtocol.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MSAccessProtocol.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MSGeneralAttributeTypes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MSGeneralAttributeTypes.asn1
index 99d34b2883..99d34b2883 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MSGeneralAttributeTypes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MSGeneralAttributeTypes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MSGeneralAutoActionTypes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MSGeneralAutoActionTypes.asn1
index eceae4ab44..eceae4ab44 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MSGeneralAutoActionTypes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MSGeneralAutoActionTypes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MSMatchingRules.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MSMatchingRules.asn1
index 37c894da86..37c894da86 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MSMatchingRules.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MSMatchingRules.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MSObjectIdentifiers.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MSObjectIdentifiers.asn1
index df194f838c..df194f838c 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MSObjectIdentifiers.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MSObjectIdentifiers.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MSUpperBounds.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MSUpperBounds.asn1
index 6494fbd3ef..6494fbd3ef 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MSUpperBounds.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MSUpperBounds.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MTAAbstractService.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MTAAbstractService.asn1
index 38035c77ae..38035c77ae 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MTAAbstractService.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MTAAbstractService.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MTSAbstractService.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSAbstractService.asn1
index 68a5118bc8..68a5118bc8 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MTSAbstractService.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSAbstractService.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MTSAbstractService88.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSAbstractService88.asn1
index f66d117f35..f66d117f35 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MTSAbstractService88.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSAbstractService88.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MTSAccessProtocol.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSAccessProtocol.asn1
index 03181c5951..03181c5951 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MTSAccessProtocol.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSAccessProtocol.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MTSObjectIdentifiers.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSObjectIdentifiers.asn1
index 1615b241ee..1615b241ee 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MTSObjectIdentifiers.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSObjectIdentifiers.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MTSUpperBounds.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSUpperBounds.asn1
index 10eac962cb..10eac962cb 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MTSUpperBounds.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSUpperBounds.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Notation.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Notation.asn1
index 96dfc39b6a..96dfc39b6a 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Notation.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Notation.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/OCSP-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/OCSP-2009.asn1
new file mode 100644
index 0000000000..db500fe9a1
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/OCSP-2009.asn1
@@ -0,0 +1,183 @@
+ OCSP-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-ocsp-02(48)}
+ DEFINITIONS EXPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ Extensions{}, EXTENSION, ATTRIBUTE
+ FROM PKIX-CommonTypes-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57)}
+
+ AlgorithmIdentifier{}, DIGEST-ALGORITHM, SIGNATURE-ALGORITHM
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ AuthorityInfoAccessSyntax, GeneralName, CrlEntryExtensions
+ FROM PKIX1Implicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-implicit-02(59)}
+
+ Name, CertificateSerialNumber, id-kp, id-ad-ocsp, Certificate
+ FROM PKIX1Explicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-explicit-02(51)}
+
+ sa-dsaWithSHA1, sa-rsaWithMD2, sa-rsaWithMD5, sa-rsaWithSHA1
+ FROM PKIXAlgs-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-algorithms2008-02(56)};
+
+ OCSPRequest ::= SEQUENCE {
+ tbsRequest TBSRequest,
+ optionalSignature [0] EXPLICIT Signature OPTIONAL }
+
+ TBSRequest ::= SEQUENCE {
+ version [0] EXPLICIT Version DEFAULT v1,
+ requestorName [1] EXPLICIT GeneralName OPTIONAL,
+ requestList SEQUENCE OF Request,
+ requestExtensions [2] EXPLICIT Extensions {{re-ocsp-nonce |
+ re-ocsp-response, ...}} OPTIONAL }
+
+ Signature ::= SEQUENCE {
+ signatureAlgorithm AlgorithmIdentifier
+ { SIGNATURE-ALGORITHM, {...}},
+ signature BIT STRING,
+ certs [0] EXPLICIT SEQUENCE OF Certificate OPTIONAL }
+
+ Version ::= INTEGER { v1(0) }
+
+ Request ::= SEQUENCE {
+ reqCert CertID,
+ singleRequestExtensions [0] EXPLICIT Extensions
+ { {re-ocsp-service-locator,
+ ...}} OPTIONAL }
+
+ CertID ::= SEQUENCE {
+ hashAlgorithm AlgorithmIdentifier
+ {DIGEST-ALGORITHM, {...}},
+ issuerNameHash OCTET STRING, -- Hash of Issuer's DN
+ issuerKeyHash OCTET STRING, -- Hash of Issuer's public key
+ serialNumber CertificateSerialNumber }
+
+ OCSPResponse ::= SEQUENCE {
+ responseStatus OCSPResponseStatus,
+ responseBytes [0] EXPLICIT ResponseBytes OPTIONAL }
+
+ OCSPResponseStatus ::= ENUMERATED {
+ successful (0), --Response has valid confirmations
+ malformedRequest (1), --Illegal confirmation request
+ internalError (2), --Internal error in issuer
+ tryLater (3), --Try again later
+ -- (4) is not used
+ sigRequired (5), --Must sign the request
+ unauthorized (6) --Request unauthorized
+ }
+
+ RESPONSE ::= TYPE-IDENTIFIER
+
+ ResponseSet RESPONSE ::= {basicResponse, ...}
+
+ ResponseBytes ::= SEQUENCE {
+ responseType RESPONSE.
+ &id ({ResponseSet}),
+ response OCTET STRING (CONTAINING RESPONSE.
+ &Type({ResponseSet}{@responseType}))}
+
+ basicResponse RESPONSE ::=
+ { BasicOCSPResponse IDENTIFIED BY id-pkix-ocsp-basic }
+
+ BasicOCSPResponse ::= SEQUENCE {
+ tbsResponseData ResponseData,
+ signatureAlgorithm AlgorithmIdentifier{SIGNATURE-ALGORITHM,
+ {sa-dsaWithSHA1 | sa-rsaWithSHA1 |
+ sa-rsaWithMD5 | sa-rsaWithMD2, ...}},
+ signature BIT STRING,
+ certs [0] EXPLICIT SEQUENCE OF Certificate OPTIONAL }
+
+ ResponseData ::= SEQUENCE {
+ version [0] EXPLICIT Version DEFAULT v1,
+ responderID ResponderID,
+ producedAt GeneralizedTime,
+ responses SEQUENCE OF SingleResponse,
+ responseExtensions [1] EXPLICIT Extensions
+ {{re-ocsp-nonce, ...}} OPTIONAL }
+
+ ResponderID ::= CHOICE {
+ byName [1] Name,
+ byKey [2] KeyHash }
+
+ KeyHash ::= OCTET STRING --SHA-1 hash of responder's public key
+ -- (excluding the tag and length fields)
+
+ SingleResponse ::= SEQUENCE {
+ certID CertID,
+ certStatus CertStatus,
+ thisUpdate GeneralizedTime,
+ nextUpdate [0] EXPLICIT GeneralizedTime OPTIONAL,
+ singleExtensions [1] EXPLICIT Extensions{{re-ocsp-crl |
+ re-ocsp-archive-cutoff |
+ CrlEntryExtensions, ...}
+ } OPTIONAL }
+
+ CertStatus ::= CHOICE {
+ good [0] IMPLICIT NULL,
+ revoked [1] IMPLICIT RevokedInfo,
+ unknown [2] IMPLICIT UnknownInfo }
+
+ RevokedInfo ::= SEQUENCE {
+ revocationTime GeneralizedTime,
+ revocationReason [0] EXPLICIT CRLReason OPTIONAL }
+
+ UnknownInfo ::= NULL
+
+ CRLReason ::= INTEGER
+
+ ArchiveCutoff ::= GeneralizedTime
+
+ AcceptableResponses ::= SEQUENCE OF RESPONSE.&id({ResponseSet})
+
+ ServiceLocator ::= SEQUENCE {
+ issuer Name,
+ locator AuthorityInfoAccessSyntax }
+
+ CrlID ::= SEQUENCE {
+ crlUrl [0] EXPLICIT IA5String OPTIONAL,
+ crlNum [1] EXPLICIT INTEGER OPTIONAL,
+ crlTime [2] EXPLICIT GeneralizedTime OPTIONAL }
+
+ -- Request Extensions
+
+ re-ocsp-nonce EXTENSION ::= { SYNTAX OCTET STRING IDENTIFIED
+ BY id-pkix-ocsp-nonce }
+ re-ocsp-response EXTENSION ::= { SYNTAX AcceptableResponses IDENTIFIED
+ BY id-pkix-ocsp-response }
+ re-ocsp-service-locator EXTENSION ::= { SYNTAX ServiceLocator
+ IDENTIFIED BY
+ id-pkix-ocsp-service-locator }
+
+ -- Response Extensions
+
+ re-ocsp-crl EXTENSION ::= { SYNTAX CrlID IDENTIFIED BY
+ id-pkix-ocsp-crl }
+ re-ocsp-archive-cutoff EXTENSION ::= { SYNTAX ArchiveCutoff
+ IDENTIFIED BY
+ id-pkix-ocsp-archive-cutoff }
+
+ -- Object Identifiers
+
+ id-kp-OCSPSigning OBJECT IDENTIFIER ::= { id-kp 9 }
+ id-pkix-ocsp OBJECT IDENTIFIER ::= id-ad-ocsp
+ id-pkix-ocsp-basic OBJECT IDENTIFIER ::= { id-pkix-ocsp 1 }
+ id-pkix-ocsp-nonce OBJECT IDENTIFIER ::= { id-pkix-ocsp 2 }
+ id-pkix-ocsp-crl OBJECT IDENTIFIER ::= { id-pkix-ocsp 3 }
+ id-pkix-ocsp-response OBJECT IDENTIFIER ::= { id-pkix-ocsp 4 }
+ id-pkix-ocsp-nocheck OBJECT IDENTIFIER ::= { id-pkix-ocsp 5 }
+ id-pkix-ocsp-archive-cutoff OBJECT IDENTIFIER ::= { id-pkix-ocsp 6 }
+ id-pkix-ocsp-service-locator OBJECT IDENTIFIER ::= { id-pkix-ocsp 7 }
+
+ END
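A quick usage sketch (not taken from the test suite): once OCSP-2009 and the modules named in its IMPORTS clause have been compiled together, the generated Erlang module can round-trip values of the types defined above. The simplest case is the top-level ENUMERATED:

    %% Sketch only; assumes 'OCSP-2009' was compiled with the BER
    %% back-end together with the modules it imports from.
    roundtrip_status() ->
        {ok, Bin} = 'OCSP-2009':encode('OCSPResponseStatus', successful),
        {ok, successful} = 'OCSP-2009':decode('OCSPResponseStatus', Bin),
        ok.
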
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/PKCS7.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/OLD-PKCS7.asn1
index ac449b59c7..ab555200bb 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/PKCS7.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/OLD-PKCS7.asn1
@@ -3,7 +3,7 @@
-- This Annex contains a module of PKCS#7 ASN.1 definitions conforming to current ASN.1 standards rather than the obsolescent (and now deprecated) 1988/90 version of ASN.1 used in version 1.5 of PKCS#7.
-- Extensions to PKCS#7 defined in RFC 2630 are included.
-- If differences are found between the ASN.1 in the following module and that in PKCS#7, the latter is definitive.
-PKCS7 {iso member-body usa(840) rsadsi(113549) pkcs(1) 7
+OLD-PKCS7 {iso member-body usa(840) rsadsi(113549) pkcs(1) 7
module(0) -- module not currently defined in PKCS#7 --} DEFINITIONS IMPLICIT
TAGS ::=
BEGIN
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/ObjectIdentifiers.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/ObjectIdentifiers.asn1
index b4f91f50c5..b4f91f50c5 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/ObjectIdentifiers.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/ObjectIdentifiers.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/OperationalBindingManagement.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/OperationalBindingManagement.asn1
index 2044feb155..2044feb155 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/OperationalBindingManagement.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/OperationalBindingManagement.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-10.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-10.asn1
new file mode 100644
index 0000000000..a5fd0fefb9
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-10.asn1
@@ -0,0 +1,56 @@
+ PKCS-10
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkcs10-2009(69)}
+ DEFINITIONS IMPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ AlgorithmIdentifier{}, DIGEST-ALGORITHM, SIGNATURE-ALGORITHM,
+ PUBLIC-KEY
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ ATTRIBUTE, Name
+ FROM PKIX1Explicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-explicit-02(51)};
+
+ -- Certificate requests
+ CertificationRequestInfo ::= SEQUENCE {
+ version INTEGER { v1(0) } (v1, ... ),
+ subject Name,
+ subjectPKInfo SubjectPublicKeyInfo{{ PKInfoAlgorithms }},
+ attributes [0] Attributes{{ CRIAttributes }}
+ }
+
+ SubjectPublicKeyInfo {PUBLIC-KEY: IOSet} ::= SEQUENCE {
+ algorithm AlgorithmIdentifier {PUBLIC-KEY, {IOSet}},
+ subjectPublicKey BIT STRING
+ }
+
+ PKInfoAlgorithms PUBLIC-KEY ::= {
+ ... -- add any locally defined algorithms here -- }
+
+ Attributes { ATTRIBUTE:IOSet } ::= SET OF Attribute{{ IOSet }}
+
+ CRIAttributes ATTRIBUTE ::= {
+ ... -- add any locally defined attributes here -- }
+
+ Attribute { ATTRIBUTE:IOSet } ::= SEQUENCE {
+ type ATTRIBUTE.&id({IOSet}),
+ values SET SIZE(1..MAX) OF ATTRIBUTE.&Type({IOSet}{@type})
+ }
+
+ CertificationRequest ::= SEQUENCE {
+ certificationRequestInfo CertificationRequestInfo,
+ signatureAlgorithm AlgorithmIdentifier{SIGNATURE-ALGORITHM,
+ { SignatureAlgorithms }},
+ signature BIT STRING
+ }
+
+ SignatureAlgorithms SIGNATURE-ALGORITHM ::= {
+ ... -- add any locally defined algorithms here -- }
+
+ END
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-12.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-12.asn1
new file mode 100644
index 0000000000..5b37a552f9
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-12.asn1
@@ -0,0 +1,174 @@
+PKCS-12 {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1)
+ pkcs-12(12) modules(0) pkcs-12(1)}
+
+-- $Revision$
+
+DEFINITIONS IMPLICIT TAGS ::=
+
+BEGIN
+
+-- EXPORTS ALL
+-- All types and values defined in this module are exported for use in
+-- other ASN.1 modules.
+
+IMPORTS
+
+informationFramework
+ FROM UsefulDefinitions {joint-iso-itu-t(2) ds(5) module(1)
+ usefulDefinitions(0) 3}
+
+ATTRIBUTE
+ FROM InformationFramework informationFramework
+
+ContentInfo, DigestInfo
+ FROM PKCS-7 {iso(1) member-body(2) us(840) rsadsi(113549)
+ pkcs(1) pkcs-7(7) modules(0) pkcs-7(1)}
+
+PrivateKeyInfo, EncryptedPrivateKeyInfo
+ FROM PKCS-8 {iso(1) member-body(2) us(840) rsadsi(113549)
+ pkcs(1) pkcs-8(8) modules(1) pkcs-8(1)}
+
+pkcs-9, friendlyName, localKeyId, certTypes, crlTypes
+ FROM PKCS-9 {iso(1) member-body(2) us(840) rsadsi(113549)
+ pkcs(1) pkcs-9(9) modules(0) pkcs-9(1)};
+
+-- Object identifiers
+
+rsadsi OBJECT IDENTIFIER ::= {iso(1) member-body(2) us(840) rsadsi(113549)}
+pkcs OBJECT IDENTIFIER ::= {rsadsi pkcs(1)}
+pkcs-12 OBJECT IDENTIFIER ::= {pkcs 12}
+pkcs-12PbeIds OBJECT IDENTIFIER ::= {pkcs-12 1}
+pbeWithSHAAnd128BitRC4 OBJECT IDENTIFIER ::= {pkcs-12PbeIds 1}
+pbeWithSHAAnd40BitRC4 OBJECT IDENTIFIER ::= {pkcs-12PbeIds 2}
+pbeWithSHAAnd3-KeyTripleDES-CBC OBJECT IDENTIFIER ::= {pkcs-12PbeIds 3}
+pbeWithSHAAnd2-KeyTripleDES-CBC OBJECT IDENTIFIER ::= {pkcs-12PbeIds 4}
+pbeWithSHAAnd128BitRC2-CBC OBJECT IDENTIFIER ::= {pkcs-12PbeIds 5}
+pbewithSHAAnd40BitRC2-CBC OBJECT IDENTIFIER ::= {pkcs-12PbeIds 6}
+
+bagtypes OBJECT IDENTIFIER ::= {pkcs-12 10 1}
+
+-- The PFX PDU
+
+PFX ::= SEQUENCE {
+ version INTEGER {v3(3)}(v3,...),
+ authSafe ContentInfo,
+ macData MacData OPTIONAL
+}
+
+MacData ::= SEQUENCE {
+ mac DigestInfo,
+ macSalt OCTET STRING,
+ iterations INTEGER DEFAULT 1
+-- Note: The default is for historical reasons and its use is
+-- deprecated. A higher value, like 1024, is recommended.
+}
+
+AuthenticatedSafe ::= SEQUENCE OF ContentInfo
+ -- Data if unencrypted
+ -- EncryptedData if password-encrypted
+ -- EnvelopedData if public key-encrypted
+
+SafeContents ::= SEQUENCE OF SafeBag
+
+SafeBag ::= SEQUENCE {
+ bagId BAG-TYPE.&id ({PKCS12BagSet}),
+ bagValue [0] EXPLICIT BAG-TYPE.&Type({PKCS12BagSet}{@bagId}),
+ bagAttributes SET OF PKCS12Attribute OPTIONAL
+}
+
+-- Bag types
+
+keyBag BAG-TYPE ::=
+ {KeyBag IDENTIFIED BY {bagtypes 1}}
+pkcs8ShroudedKeyBag BAG-TYPE ::=
+ {PKCS8ShroudedKeyBag IDENTIFIED BY {bagtypes 2}}
+certBag BAG-TYPE ::=
+ {CertBag IDENTIFIED BY {bagtypes 3}}
+crlBag BAG-TYPE ::=
+ {CRLBag IDENTIFIED BY {bagtypes 4}}
+secretBag BAG-TYPE ::=
+ {SecretBag IDENTIFIED BY {bagtypes 5}}
+safeContentsBag BAG-TYPE ::=
+ {SafeContents IDENTIFIED BY {bagtypes 6}}
+
+PKCS12BagSet BAG-TYPE ::= {
+ keyBag |
+ pkcs8ShroudedKeyBag |
+ certBag |
+ crlBag |
+ secretBag |
+ safeContentsBag,
+ ... -- For future extensions
+}
+
+BAG-TYPE ::= TYPE-IDENTIFIER
+
+-- KeyBag
+
+KeyBag ::= PrivateKeyInfo
+
+-- Shrouded KeyBag
+
+PKCS8ShroudedKeyBag ::= EncryptedPrivateKeyInfo
+
+-- CertBag
+
+CertBag ::= SEQUENCE {
+ certId BAG-TYPE.&id ({CertTypes}),
+ certValue [0] EXPLICIT BAG-TYPE.&Type ({CertTypes}{@certId})
+}
+
+x509Certificate BAG-TYPE ::=
+ {OCTET STRING IDENTIFIED BY {certTypes 1}}
+ -- DER-encoded X.509 certificate stored in OCTET STRING
+sdsiCertificate BAG-TYPE ::=
+ {IA5String IDENTIFIED BY {certTypes 2}}
+ -- Base64-encoded SDSI certificate stored in IA5String
+
+CertTypes BAG-TYPE ::= {
+ x509Certificate |
+ sdsiCertificate,
+ ... -- For future extensions
+}
+
+-- CRLBag
+
+CRLBag ::= SEQUENCE {
+ crlId BAG-TYPE.&id ({CRLTypes}),
+ crlValue [0] EXPLICIT BAG-TYPE.&Type ({CRLTypes}{@crlId})
+}
+
+x509CRL BAG-TYPE ::=
+ {OCTET STRING IDENTIFIED BY {crlTypes 1}}
+ -- DER-encoded X.509 CRL stored in OCTET STRING
+
+CRLTypes BAG-TYPE ::= {
+ x509CRL,
+ ... -- For future extensions
+}
+
+-- Secret Bag
+
+SecretBag ::= SEQUENCE {
+ secretTypeId BAG-TYPE.&id ({SecretTypes}),
+ secretValue [0] EXPLICIT BAG-TYPE.&Type ({SecretTypes}{@secretTypeId})
+}
+
+SecretTypes BAG-TYPE ::= {
+ ... -- For future extensions
+}
+
+-- Attributes
+
+PKCS12Attribute ::= SEQUENCE {
+ attrId ATTRIBUTE.&id ({PKCS12AttrSet}),
+ attrValues SET OF ATTRIBUTE.&Type ({PKCS12AttrSet}{@attrId})
+} -- This type is compatible with the X.500 type 'Attribute'
+
+PKCS12AttrSet ATTRIBUTE ::= {
+ friendlyName |
+ localKeyId,
+ ... -- Other attributes are allowed
+}
+
+END \ No newline at end of file
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-5.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-5.asn1
new file mode 100644
index 0000000000..91b0dc36bf
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-5.asn1
@@ -0,0 +1,202 @@
+-- PKCS #5 v2.1 ASN.1 Module
+-- Revised October 27, 2012
+
+-- This module has been checked for conformance with the
+-- ASN.1 standard by the OSS ASN.1 Tools
+
+PKCS-5 {
+ iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-5(5) modules(16)
+ pkcs5v2-1(2)}
+
+DEFINITIONS EXPLICIT TAGS ::=
+
+BEGIN
+
+-- ============================
+-- Basic object identifiers
+-- ============================
+
+nistAlgorithms OBJECT IDENTIFIER ::=
+ {joint-iso-itu-t(2) country(16) us(840) organization(1) gov(101) csor(3) 4}
+oiw OBJECT IDENTIFIER ::= {iso(1) identified-organization(3) 14}
+rsadsi OBJECT IDENTIFIER ::= {iso(1) member-body(2) us(840) 113549}
+pkcs OBJECT IDENTIFIER ::= {rsadsi 1}
+pkcs-5 OBJECT IDENTIFIER ::= {pkcs 5}
+
+
+-- ============================
+-- Basic types and classes
+-- ============================
+
+AlgorithmIdentifier { ALGORITHM-IDENTIFIER:InfoObjectSet } ::= SEQUENCE {
+ algorithm ALGORITHM-IDENTIFIER.&id({InfoObjectSet}),
+ parameters ALGORITHM-IDENTIFIER.&Type({InfoObjectSet} {@algorithm}) OPTIONAL
+}
+
+ALGORITHM-IDENTIFIER ::= TYPE-IDENTIFIER
+
+
+-- ============================
+-- PBKDF2
+-- ============================
+
+PBKDF2Algorithms ALGORITHM-IDENTIFIER ::=
+ { {PBKDF2-params IDENTIFIED BY id-PBKDF2}, ...}
+
+id-PBKDF2 OBJECT IDENTIFIER ::= {pkcs-5 12}
+
+algid-hmacWithSHA1 AlgorithmIdentifier {{PBKDF2-PRFs}} ::=
+ {algorithm id-hmacWithSHA1, parameters NULL : NULL}
+
+PBKDF2-params ::= SEQUENCE {
+ salt CHOICE {
+ specified OCTET STRING,
+ otherSource AlgorithmIdentifier {{PBKDF2-SaltSources}}
+ },
+ iterationCount INTEGER (1..MAX),
+ keyLength INTEGER (1..MAX) OPTIONAL,
+ prf AlgorithmIdentifier {{PBKDF2-PRFs}} DEFAULT algid-hmacWithSHA1
+}
+
+PBKDF2-SaltSources ALGORITHM-IDENTIFIER ::= { ... }
+
+PBKDF2-PRFs ALGORITHM-IDENTIFIER ::= {
+ {NULL IDENTIFIED BY id-hmacWithSHA1} |
+ {NULL IDENTIFIED BY id-hmacWithSHA224} |
+ {NULL IDENTIFIED BY id-hmacWithSHA256} |
+ {NULL IDENTIFIED BY id-hmacWithSHA384} |
+ {NULL IDENTIFIED BY id-hmacWithSHA512} |
+ {NULL IDENTIFIED BY id-hmacWithSHA512-224} |
+ {NULL IDENTIFIED BY id-hmacWithSHA512-256},
+ ...
+}
+
+
+-- ============================
+-- PBES1
+-- ============================
+
+PBES1Algorithms ALGORITHM-IDENTIFIER ::= {
+ {PBEParameter IDENTIFIED BY pbeWithMD2AndDES-CBC} |
+ {PBEParameter IDENTIFIED BY pbeWithMD2AndRC2-CBC} |
+ {PBEParameter IDENTIFIED BY pbeWithMD5AndDES-CBC} |
+ {PBEParameter IDENTIFIED BY pbeWithMD5AndRC2-CBC} |
+ {PBEParameter IDENTIFIED BY pbeWithSHA1AndDES-CBC} |
+ {PBEParameter IDENTIFIED BY pbeWithSHA1AndRC2-CBC},
+ ...
+}
+
+pbeWithMD2AndDES-CBC OBJECT IDENTIFIER ::= {pkcs-5 1}
+pbeWithMD2AndRC2-CBC OBJECT IDENTIFIER ::= {pkcs-5 4}
+pbeWithMD5AndDES-CBC OBJECT IDENTIFIER ::= {pkcs-5 3}
+pbeWithMD5AndRC2-CBC OBJECT IDENTIFIER ::= {pkcs-5 6}
+pbeWithSHA1AndDES-CBC OBJECT IDENTIFIER ::= {pkcs-5 10}
+pbeWithSHA1AndRC2-CBC OBJECT IDENTIFIER ::= {pkcs-5 11}
+
+PBEParameter ::= SEQUENCE {
+ salt OCTET STRING (SIZE(8)),
+ iterationCount INTEGER
+}
+
+
+-- ============================
+-- PBES2
+-- ============================
+
+PBES2Algorithms ALGORITHM-IDENTIFIER ::= {
+ {PBES2-params IDENTIFIED BY id-PBES2},
+ ...
+}
+
+id-PBES2 OBJECT IDENTIFIER ::= {pkcs-5 13}
+
+PBES2-params ::= SEQUENCE {
+ keyDerivationFunc AlgorithmIdentifier {{PBES2-KDFs}},
+ encryptionScheme AlgorithmIdentifier {{PBES2-Encs}}
+}
+
+PBES2-KDFs ALGORITHM-IDENTIFIER ::= {
+ {PBKDF2-params IDENTIFIED BY id-PBKDF2},
+ ...
+}
+
+PBES2-Encs ALGORITHM-IDENTIFIER ::= { ... }
+
+
+-- ============================
+-- PBMAC1
+-- ============================
+
+PBMAC1Algorithms ALGORITHM-IDENTIFIER ::= {
+ {PBMAC1-params IDENTIFIED BY id-PBMAC1},
+ ...
+}
+
+id-PBMAC1 OBJECT IDENTIFIER ::= {pkcs-5 14}
+
+PBMAC1-params ::= SEQUENCE {
+ keyDerivationFunc AlgorithmIdentifier {{PBMAC1-KDFs}},
+ messageAuthScheme AlgorithmIdentifier {{PBMAC1-MACs}}
+}
+
+PBMAC1-KDFs ALGORITHM-IDENTIFIER ::= {
+ {PBKDF2-params IDENTIFIED BY id-PBKDF2},
+ ...
+}
+
+PBMAC1-MACs ALGORITHM-IDENTIFIER ::= { ... }
+
+-- ============================
+-- Supporting techniques
+-- ============================
+
+digestAlgorithm OBJECT IDENTIFIER ::= {rsadsi 2}
+encryptionAlgorithm OBJECT IDENTIFIER ::= {rsadsi 3}
+
+SupportingAlgorithms ALGORITHM-IDENTIFIER ::= {
+ {NULL IDENTIFIED BY id-hmacWithSHA1} |
+ {OCTET STRING (SIZE(8)) IDENTIFIED BY desCBC} |
+ {OCTET STRING (SIZE(8)) IDENTIFIED BY des-EDE3-CBC} |
+ {RC2-CBC-Parameter IDENTIFIED BY rc2CBC} |
+ {RC5-CBC-Parameters IDENTIFIED BY rc5-CBC-PAD} |
+ {OCTET STRING (SIZE(16)) IDENTIFIED BY aes128-CBC-PAD} |
+ {OCTET STRING (SIZE(16)) IDENTIFIED BY aes192-CBC-PAD} |
+ {OCTET STRING (SIZE(16)) IDENTIFIED BY aes256-CBC-PAD},
+ ...
+}
+
+id-hmacWithSHA1 OBJECT IDENTIFIER ::= {digestAlgorithm 7}
+id-hmacWithSHA224 OBJECT IDENTIFIER ::= {digestAlgorithm 8}
+id-hmacWithSHA256 OBJECT IDENTIFIER ::= {digestAlgorithm 9}
+id-hmacWithSHA384 OBJECT IDENTIFIER ::= {digestAlgorithm 10}
+id-hmacWithSHA512 OBJECT IDENTIFIER ::= {digestAlgorithm 11}
+id-hmacWithSHA512-224 OBJECT IDENTIFIER ::= {digestAlgorithm 12}
+id-hmacWithSHA512-256 OBJECT IDENTIFIER ::= {digestAlgorithm 13}
+
+-- from OIW
+desCBC OBJECT IDENTIFIER ::= {oiw secsig(3) algorithms(2) 7}
+
+des-EDE3-CBC OBJECT IDENTIFIER ::= {encryptionAlgorithm 7}
+
+rc2CBC OBJECT IDENTIFIER ::= {encryptionAlgorithm 2}
+
+RC2-CBC-Parameter ::= SEQUENCE {
+ rc2ParameterVersion INTEGER OPTIONAL,
+ iv OCTET STRING (SIZE(8))
+}
+
+rc5-CBC-PAD OBJECT IDENTIFIER ::= {encryptionAlgorithm 9}
+
+RC5-CBC-Parameters ::= SEQUENCE {
+ version INTEGER {v1-0(16)} (v1-0),
+ rounds INTEGER (8..127),
+ blockSizeInBits INTEGER (64 | 128),
+ iv OCTET STRING OPTIONAL
+}
+
+aes OBJECT IDENTIFIER ::= { nistAlgorithms 1 }
+aes128-CBC-PAD OBJECT IDENTIFIER ::= { aes 2 }
+aes192-CBC-PAD OBJECT IDENTIFIER ::= { aes 22 }
+aes256-CBC-PAD OBJECT IDENTIFIER ::= { aes 42 }
+
+END \ No newline at end of file
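Unlike most modules in this directory, PKCS-5 above has no IMPORTS clause, so it also serves as a small illustration of how OPTIONAL and DEFAULT components map onto the Erlang terms the generated code expects. The sketch below is illustrative only; the salt and iteration count are made-up values, and the tuple layout assumes the default record-based representation of the BER back-end:

    %% Sketch only: encode a PBKDF2-params value with a generated
    %% 'PKCS-5' module. The record is written in its underlying tuple
    %% form; asn1_NOVALUE omits an OPTIONAL component and asn1_DEFAULT
    %% stands for the declared DEFAULT (algid-hmacWithSHA1 for prf).
    encode_pbkdf2_params() ->
        Params = {'PBKDF2-params',
                  {specified, <<"12345678">>}, %% salt, 'specified' alternative
                  2048,                        %% iterationCount
                  asn1_NOVALUE,                %% keyLength (OPTIONAL, omitted)
                  asn1_DEFAULT},               %% prf (take the declared DEFAULT)
        {ok, Der} = 'PKCS-5':encode('PBKDF2-params', Params),
        Der.
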
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-7.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-7.asn1
new file mode 100644
index 0000000000..4cea8db240
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-7.asn1
@@ -0,0 +1,326 @@
+PKCS-7 {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-7(7)
+ modules(0) pkcs-7(1)}
+
+DEFINITIONS EXPLICIT TAGS ::=
+BEGIN
+
+--
+-- 3. Definitions
+--
+
+-- EXPORTS All;
+
+IMPORTS
+
+informationFramework, authenticationFramework
+ FROM UsefulDefinitions {joint-iso-itu-t ds(5) module(1)
+ usefulDefinitions(0) 3}
+
+ Name, ATTRIBUTE
+ FROM InformationFramework informationFramework
+
+ ALGORITHM, Certificate, CertificateSerialNumber,
+ CertificateList
+ FROM AuthenticationFramework authenticationFramework
+
+ contentType, messageDigest, signingTime, counterSignature
+ FROM PKCS-9 {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1)
+ pkcs-9(9) modules(0) pkcs-9(1)};
+--
+-- 6. Useful types
+--
+
+-- Also defined in X.509
+-- Redeclared here as a parameterized type
+AlgorithmIdentifier {ALGORITHM:IOSet} ::= SEQUENCE {
+ algorithm ALGORITHM.&id({IOSet}),
+ parameters ALGORITHM.&Type({IOSet}{@algorithm}) OPTIONAL
+}
+
+-- Also defined in X.501
+-- Redeclared here as a parameterized type
+Attribute { ATTRIBUTE:IOSet } ::= SEQUENCE {
+ type ATTRIBUTE.&id({IOSet}),
+ values SET SIZE (1..MAX) OF ATTRIBUTE.&Type({IOSet}{@type})
+}
+
+CertificateRevocationLists ::=
+ SET OF CertificateList
+
+Certificates ::=
+ SEQUENCE OF Certificate
+
+CRLSequence ::=
+ SEQUENCE OF CertificateList
+
+ContentEncryptionAlgorithmIdentifier ::=
+ AlgorithmIdentifier {{ContentEncryptionAlgorithms}}
+
+ContentEncryptionAlgorithms ALGORITHM ::= {
+ ... -- add any application-specific algorithms here
+}
+
+DigestAlgorithmIdentifier ::=
+ AlgorithmIdentifier {{DigestAlgorithms}}
+
+DigestAlgorithms ALGORITHM ::= {
+ ... -- add any application-specific algorithms here
+}
+
+DigestEncryptionAlgorithmIdentifier ::=
+ AlgorithmIdentifier {{DigestEncryptionAlgorithms}}
+
+DigestEncryptionAlgorithms ALGORITHM ::= {
+ ... -- add any application-specific algorithms here
+}
+
+ExtendedCertificateOrCertificate ::= CHOICE {
+ certificate Certificate, -- X.509
+ extendedCertificate [0] IMPLICIT ExtendedCertificate -- PKCS#6
+}
+
+ExtendedCertificate ::= Certificate -- cheating
+
+ExtendedCertificatesAndCertificates ::=
+ SET OF ExtendedCertificateOrCertificate
+
+IssuerAndSerialNumber ::= SEQUENCE {
+ issuer Name,
+ serialNumber CertificateSerialNumber
+}
+
+KeyEncryptionAlgorithmIdentifier ::=
+ AlgorithmIdentifier {{KeyEncryptionAlgorithms}}
+
+KeyEncryptionAlgorithms ALGORITHM ::= {
+ ... -- add any application-specific algorithms here
+}
+
+--
+-- 7. General syntax
+--
+
+ContentInfo ::= SEQUENCE {
+ contentType ContentType,
+ content [0] EXPLICIT CONTENTS.&Type({Contents}{@contentType})
+OPTIONAL
+}
+
+CONTENTS ::= TYPE-IDENTIFIER
+
+Contents CONTENTS ::= {
+ {Data IDENTIFIED BY data} |
+ {SignedData IDENTIFIED BY signedData} |
+ {EnvelopedData IDENTIFIED BY envelopedData} |
+ {SignedAndEnvelopedData IDENTIFIED BY signedAndEnvelopedData} |
+ {DigestedData IDENTIFIED BY digestedData} |
+ {EncryptedData IDENTIFIED BY encryptedData},
+ ... -- add any application-specific types/contents here
+}
+
+ContentType ::= CONTENTS.&id({Contents})
+
+--
+-- 8. Data content type
+--
+
+Data ::= OCTET STRING
+
+--
+-- 9. Signed-data content type
+--
+
+SignedData ::= SEQUENCE {
+ version INTEGER {sdVer1(1), sdVer2(2)} (sdVer1 | sdVer2),
+ digestAlgorithms
+ DigestAlgorithmIdentifiers,
+ contentInfo ContentInfo,
+ certificates CHOICE {
+ certSet [0] IMPLICIT ExtendedCertificatesAndCertificates,
+ certSequence [2] IMPLICIT Certificates
+ } OPTIONAL,
+ crls CHOICE {
+ crlSet [1] IMPLICIT CertificateRevocationLists,
+ crlSequence [3] IMPLICIT CRLSequence
+ } OPTIONAL,
+ signerInfos SignerInfos
+} (WITH COMPONENTS { ..., version (sdVer1),
+ digestAlgorithms (WITH COMPONENTS { ..., daSet PRESENT }),
+ certificates (WITH COMPONENTS { ..., certSequence ABSENT }),
+ crls (WITH COMPONENTS { ..., crlSequence ABSENT }),
+ signerInfos (WITH COMPONENTS { ..., siSet PRESENT })
+ } |
+ WITH COMPONENTS { ..., version (sdVer2),
+ digestAlgorithms (WITH COMPONENTS { ..., daSequence PRESENT }),
+ certificates (WITH COMPONENTS { ..., certSet ABSENT }),
+ crls (WITH COMPONENTS { ..., crlSet ABSENT }),
+ signerInfos (WITH COMPONENTS { ..., siSequence PRESENT })
+})
+
+SignerInfos ::= CHOICE {
+ siSet SET OF SignerInfo,
+ siSequence SEQUENCE OF SignerInfo
+}
+
+DigestAlgorithmIdentifiers ::= CHOICE {
+ daSet SET OF DigestAlgorithmIdentifier,
+ daSequence SEQUENCE OF DigestAlgorithmIdentifier
+}
+
+SignerInfo ::= SEQUENCE {
+ version INTEGER {siVer1(1), siVer2(2)} (siVer1 | siVer2),
+ issuerAndSerialNumber
+ IssuerAndSerialNumber,
+ digestAlgorithm DigestAlgorithmIdentifier,
+ authenticatedAttributes CHOICE {
+ aaSet [0] IMPLICIT SET OF Attribute {{Authenticated}},
+ aaSequence [2] EXPLICIT SEQUENCE OF Attribute {{Authenticated}}
+    -- EXPLICIT, because it is easier to compute the digest over a sequence of
+    -- attributes and then reuse the encoded sequence in aaSequence.
+ } OPTIONAL,
+ digestEncryptionAlgorithm
+ DigestEncryptionAlgorithmIdentifier,
+ encryptedDigest EncryptedDigest,
+ unauthenticatedAttributes CHOICE {
+ uaSet [1] IMPLICIT SET OF Attribute {{Unauthenticated}},
+ uaSequence [3] IMPLICIT SEQUENCE OF Attribute {{Unauthenticated}}
+ } OPTIONAL
+} (WITH COMPONENTS { ..., version (siVer1),
+ authenticatedAttributes (WITH COMPONENTS { ..., aaSequence ABSENT }),
+ unauthenticatedAttributes (WITH COMPONENTS { ..., uaSequence ABSENT })
+} | WITH COMPONENTS { ..., version (siVer2),
+ authenticatedAttributes (WITH COMPONENTS { ..., aaSet ABSENT }),
+ unauthenticatedAttributes (WITH COMPONENTS { ..., uaSet ABSENT })
+})
+
+Authenticated ATTRIBUTE ::= {
+ contentType |
+ messageDigest,
+ ..., -- add application-specific attributes here
+ signingTime
+}
+
+Unauthenticated ATTRIBUTE ::= {
+ ..., -- add application-specific attributes here
+ counterSignature
+}
+
+EncryptedDigest ::= OCTET STRING
+
+DigestInfo ::= SEQUENCE {
+ digestAlgorithm DigestAlgorithmIdentifier,
+ digest Digest
+}
+
+Digest ::= OCTET STRING
+
+--
+-- 10. Enveloped-data content type
+--
+
+EnvelopedData ::= SEQUENCE {
+ version INTEGER {edVer0(0), edVer1(1)} (edVer0 | edVer1),
+ recipientInfos RecipientInfos,
+ encryptedContentInfo
+ EncryptedContentInfo
+} (WITH COMPONENTS { ..., version (edVer0),
+ recipientInfos (WITH COMPONENTS { ..., riSet PRESENT })
+} | WITH COMPONENTS { ..., version (edVer1),
+ recipientInfos (WITH COMPONENTS { ..., riSequence PRESENT })
+})
+
+RecipientInfos ::= CHOICE {
+ riSet SET OF RecipientInfo,
+ riSequence SEQUENCE OF RecipientInfo
+}
+
+EncryptedContentInfo ::= SEQUENCE {
+ contentType ContentType,
+ contentEncryptionAlgorithm
+ ContentEncryptionAlgorithmIdentifier,
+ encryptedContent
+ [0] IMPLICIT EncryptedContent OPTIONAL
+}
+
+EncryptedContent ::= OCTET STRING
+
+RecipientInfo ::= SEQUENCE {
+ version INTEGER {riVer0(0)} (riVer0),
+ issuerAndSerialNumber
+ IssuerAndSerialNumber,
+ keyEncryptionAlgorithm
+ KeyEncryptionAlgorithmIdentifier,
+ encryptedKey EncryptedKey
+}
+
+EncryptedKey ::= OCTET STRING
+
+--
+-- 11. Signed-and-enveloped-data content type
+--
+
+SignedAndEnvelopedData ::= SEQUENCE {
+ version INTEGER {seVer1(1), seVer2(2)} (seVer1 | seVer2),
+ recipientInfos RecipientInfos,
+ digestAlgorithms
+ DigestAlgorithmIdentifiers,
+ encryptedContentInfo
+ EncryptedContentInfo,
+ certificates CHOICE {
+ certSet [0] IMPLICIT ExtendedCertificatesAndCertificates,
+ certSequence [2] IMPLICIT Certificates
+ } OPTIONAL,
+ crls CHOICE {
+ crlSet [1] IMPLICIT CertificateRevocationLists,
+ crlSequence [3] IMPLICIT CRLSequence
+ } OPTIONAL,
+ signerInfos SignerInfos
+} (WITH COMPONENTS { ..., version (seVer1),
+ recipientInfos (WITH COMPONENTS { ..., riSet PRESENT }),
+ digestAlgorithms (WITH COMPONENTS { ..., daSet PRESENT }),
+ certificates (WITH COMPONENTS { ..., certSequence ABSENT }),
+ crls (WITH COMPONENTS { ..., crlSequence ABSENT }),
+ signerInfos (WITH COMPONENTS { ..., siSet PRESENT })
+} |
+ WITH COMPONENTS { ..., version (seVer2),
+ recipientInfos (WITH COMPONENTS { ..., riSequence PRESENT }),
+ digestAlgorithms (WITH COMPONENTS { ..., daSequence PRESENT }),
+ certificates (WITH COMPONENTS { ..., certSet ABSENT }),
+ crls (WITH COMPONENTS { ..., crlSet ABSENT }),
+ signerInfos (WITH COMPONENTS { ..., siSequence PRESENT })
+})
+
+--
+-- 12. Digested-data content type
+--
+
+DigestedData ::= SEQUENCE {
+ version INTEGER {ddVer0(0)} (ddVer0),
+ digestAlgorithm DigestAlgorithmIdentifier,
+ contentInfo ContentInfo,
+ digest Digest
+}
+
+--
+-- 13. Encrypted-data content type
+--
+
+EncryptedData ::= SEQUENCE {
+ version INTEGER {edVer0(0)} (edVer0),
+ encryptedContentInfo EncryptedContentInfo
+}
+
+--
+-- 14. Object Identifiers
+--
+
+pkcs-7 OBJECT IDENTIFIER ::=
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) 7 }
+data OBJECT IDENTIFIER ::= { pkcs-7 1 }
+signedData OBJECT IDENTIFIER ::= { pkcs-7 2 }
+envelopedData OBJECT IDENTIFIER ::= { pkcs-7 3 }
+signedAndEnvelopedData OBJECT IDENTIFIER ::= { pkcs-7 4 }
+digestedData OBJECT IDENTIFIER ::= { pkcs-7 5 }
+encryptedData OBJECT IDENTIFIER ::= { pkcs-7 6 }
+
+END
\ No newline at end of file
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-8.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-8.asn1
new file mode 100644
index 0000000000..266f90170a
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-8.asn1
@@ -0,0 +1,61 @@
+PKCS-8 {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-8(8)
+ modules(1) pkcs-8(1)}
+
+-- $Revision: 1.5 $
+
+-- This module has been checked for conformance with the ASN.1
+-- standard by the OSS ASN.1 Tools
+
+DEFINITIONS IMPLICIT TAGS ::=
+
+BEGIN
+
+-- EXPORTS All --
+-- All types and values defined in this module are exported for use in other
+-- ASN.1 modules.
+
+IMPORTS
+
+informationFramework
+ FROM UsefulDefinitions {joint-iso-itu-t(2) ds(5) module(1)
+ usefulDefinitions(0) 3}
+
+Attribute
+ FROM InformationFramework informationFramework
+
+AlgorithmIdentifier, ALGORITHM-IDENTIFIER
+ FROM PKCS-5 {iso(1) member-body(2) us(840) rsadsi(113549)
+ pkcs(1) pkcs-5(5) modules(16) pkcs-5(1)};
+
+-- Private-key information syntax
+
+PrivateKeyInfo ::= SEQUENCE {
+ version Version,
+ privateKeyAlgorithm AlgorithmIdentifier {{PrivateKeyAlgorithms}},
+ privateKey PrivateKey,
+ attributes [0] Attributes OPTIONAL }
+
+Version ::= INTEGER {v1(0)} (v1,...)
+
+PrivateKey ::= OCTET STRING
+
+Attributes ::= SET OF Attribute
+
+-- Encrypted private-key information syntax
+
+EncryptedPrivateKeyInfo ::= SEQUENCE {
+ encryptionAlgorithm AlgorithmIdentifier {{KeyEncryptionAlgorithms}},
+ encryptedData EncryptedData
+}
+
+EncryptedData ::= OCTET STRING
+
+PrivateKeyAlgorithms ALGORITHM-IDENTIFIER ::= {
+ ... -- For local profiles
+}
+
+KeyEncryptionAlgorithms ALGORITHM-IDENTIFIER ::= {
+ ... -- For local profiles
+}
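+
+-- Illustrative only (editorial, not part of the original module):
+-- assuming ALGORITHM-IDENTIFIER keeps the usual TYPE-IDENTIFIER
+-- syntax from PKCS #5, a local profile would populate these open
+-- object sets along the lines of
+--
+--   PrivateKeyAlgorithms ALGORITHM-IDENTIFIER ::= {
+--       {NULL IDENTIFIED BY rsaEncryption}, ...
+--   }
+--
+-- where rsaEncryption is the PKCS #1 OBJECT IDENTIFIER
+-- {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) 1 1}.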
+
+END
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-9.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-9.asn1
new file mode 100644
index 0000000000..cd561f4d7e
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-9.asn1
@@ -0,0 +1,391 @@
+PKCS-9 {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1)
+pkcs-9(9) modules(0) pkcs-9(1)}
+
+-- $Revision$
+
+DEFINITIONS IMPLICIT TAGS ::=
+
+BEGIN
+
+-- EXPORTS All --
+-- All types and values defined in this module are exported for use in
+-- other ASN.1 modules.
+
+IMPORTS
+
+informationFramework, authenticationFramework, selectedAttributeTypes,
+ upperBounds , id-at
+ FROM UsefulDefinitions {joint-iso-itu-t ds(5) module(1)
+ usefulDefinitions(0) 3}
+
+ub-name
+ FROM UpperBounds upperBounds
+
+OBJECT-CLASS, ATTRIBUTE, MATCHING-RULE, Attribute, top, objectIdentifierMatch
+ FROM InformationFramework informationFramework
+
+ALGORITHM, Extensions, Time
+ FROM AuthenticationFramework authenticationFramework
+
+DirectoryString, octetStringMatch, caseIgnoreMatch, caseExactMatch,
+ generalizedTimeMatch, integerMatch, serialNumber
+ FROM SelectedAttributeTypes selectedAttributeTypes
+
+ContentInfo, SignerInfo
+ FROM CryptographicMessageSyntax-2009 {iso(1) member-body(2) us(840)
+ rsadsi(113549) pkcs(1) pkcs-9(9) smime(16) modules(0) cms(1)}
+
+EncryptedPrivateKeyInfo
+ FROM PKCS-8 {iso(1) member-body(2) us(840) rsadsi(113549)
+ pkcs(1) pkcs-8(8) modules(1) pkcs-8(1)}
+
+PFX
+ FROM PKCS-12 {iso(1) member-body(2) us(840) rsadsi(113549)
+ pkcs(1) pkcs-12(12) modules(0) pkcs-12(1)}
+
+-- PKCS15Token
+-- FROM PKCS-15 {iso(1) member-body(2) us(840) rsadsi(113549)
+-- pkcs(1) pkcs-15(15) modules(1) pkcs-15(1)}
+;
+
+-- Upper bounds
+pkcs-9-ub-pkcs9String INTEGER ::= 255
+pkcs-9-ub-emailAddress INTEGER ::= pkcs-9-ub-pkcs9String
+pkcs-9-ub-unstructuredName INTEGER ::= pkcs-9-ub-pkcs9String
+pkcs-9-ub-unstructuredAddress INTEGER ::= pkcs-9-ub-pkcs9String
+pkcs-9-ub-challengePassword INTEGER ::= pkcs-9-ub-pkcs9String
+pkcs-9-ub-friendlyName INTEGER ::= pkcs-9-ub-pkcs9String
+pkcs-9-ub-signingDescription INTEGER ::= pkcs-9-ub-pkcs9String
+pkcs-9-ub-match INTEGER ::= pkcs-9-ub-pkcs9String
+pkcs-9-ub-pseudonym INTEGER ::= ub-name
+pkcs-9-ub-placeOfBirth INTEGER ::= ub-name
+
+-- Object Identifiers
+
+pkcs-9 OBJECT IDENTIFIER ::= {iso(1) member-body(2) us(840)
+ rsadsi(113549) pkcs(1) 9}
+
+ -- Main arcs
+pkcs-9-mo OBJECT IDENTIFIER ::= {pkcs-9 0} -- Modules branch
+pkcs-9-oc OBJECT IDENTIFIER ::= {pkcs-9 24} -- Object class branch
+pkcs-9-at OBJECT IDENTIFIER ::= {pkcs-9 25} -- Attribute branch, for new attributes
+pkcs-9-sx OBJECT IDENTIFIER ::= {pkcs-9 26} -- For syntaxes (RFC 2252)
+pkcs-9-mr OBJECT IDENTIFIER ::= {pkcs-9 27} -- Matching rules
+
+ -- Object classes
+pkcs-9-oc-pkcsEntity OBJECT IDENTIFIER ::= {pkcs-9-oc 1}
+pkcs-9-oc-naturalPerson OBJECT IDENTIFIER ::= {pkcs-9-oc 2}
+
+ -- Attributes
+pkcs-9-at-emailAddress OBJECT IDENTIFIER ::= {pkcs-9 1}
+pkcs-9-at-unstructuredName OBJECT IDENTIFIER ::= {pkcs-9 2}
+pkcs-9-at-contentType OBJECT IDENTIFIER ::= {pkcs-9 3}
+pkcs-9-at-messageDigest OBJECT IDENTIFIER ::= {pkcs-9 4}
+pkcs-9-at-signingTime OBJECT IDENTIFIER ::= {pkcs-9 5}
+pkcs-9-at-counterSignature OBJECT IDENTIFIER ::= {pkcs-9 6}
+pkcs-9-at-challengePassword OBJECT IDENTIFIER ::= {pkcs-9 7}
+pkcs-9-at-unstructuredAddress OBJECT IDENTIFIER ::= {pkcs-9 8}
+pkcs-9-at-extendedCertificateAttributes OBJECT IDENTIFIER ::= {pkcs-9 9}
+
+-- Obsolete (?) attribute identifiers, purportedly from "tentative
+-- PKCS #9 draft"
+-- pkcs-9-at-issuerAndSerialNumber OBJECT IDENTIFIER ::= {pkcs-9 10}
+-- pkcs-9-at-passwordCheck OBJECT IDENTIFIER ::= {pkcs-9 11}
+-- pkcs-9-at-publicKey OBJECT IDENTIFIER ::= {pkcs-9 12}
+
+pkcs-9-at-signingDescription OBJECT IDENTIFIER ::= {pkcs-9 13}
+pkcs-9-at-extensionRequest OBJECT IDENTIFIER ::= {pkcs-9 14}
+pkcs-9-at-smimeCapabilities OBJECT IDENTIFIER ::= {pkcs-9 15}
+
+-- Unused (?)
+-- pkcs-9-at-? OBJECT IDENTIFIER ::= {pkcs-9 17}
+-- pkcs-9-at-? OBJECT IDENTIFIER ::= {pkcs-9 18}
+-- pkcs-9-at-? OBJECT IDENTIFIER ::= {pkcs-9 19}
+
+pkcs-9-at-friendlyName OBJECT IDENTIFIER ::= {pkcs-9 20}
+pkcs-9-at-localKeyId OBJECT IDENTIFIER ::= {pkcs-9 21}
+pkcs-9-at-userPKCS12 OBJECT IDENTIFIER ::= {2 16 840 1 113730 3 1 216}
+pkcs-9-at-pkcs15Token OBJECT IDENTIFIER ::= {pkcs-9-at 1}
+pkcs-9-at-encryptedPrivateKeyInfo OBJECT IDENTIFIER ::= {pkcs-9-at 2}
+pkcs-9-at-randomNonce OBJECT IDENTIFIER ::= {pkcs-9-at 3}
+pkcs-9-at-sequenceNumber OBJECT IDENTIFIER ::= {pkcs-9-at 4}
+pkcs-9-at-pkcs7PDU OBJECT IDENTIFIER ::= {pkcs-9-at 5}
+
+ -- IETF PKIX Attribute branch
+ietf-at OBJECT IDENTIFIER ::= {1 3 6 1 5 5 7 9}
+
+pkcs-9-at-dateOfBirth OBJECT IDENTIFIER ::= {ietf-at 1}
+pkcs-9-at-placeOfBirth OBJECT IDENTIFIER ::= {ietf-at 2}
+pkcs-9-at-gender OBJECT IDENTIFIER ::= {ietf-at 3}
+pkcs-9-at-countryOfCitizenship OBJECT IDENTIFIER ::= {ietf-at 4}
+pkcs-9-at-countryOfResidence OBJECT IDENTIFIER ::= {ietf-at 5}
+
+ -- Syntaxes (for use with LDAP accessible directories)
+pkcs-9-sx-pkcs9String OBJECT IDENTIFIER ::= {pkcs-9-sx 1}
+pkcs-9-sx-signingTime OBJECT IDENTIFIER ::= {pkcs-9-sx 2}
+
+ -- Matching rules
+pkcs-9-mr-caseIgnoreMatch OBJECT IDENTIFIER ::= {pkcs-9-mr 1}
+pkcs-9-mr-signingTimeMatch OBJECT IDENTIFIER ::= {pkcs-9-mr 2}
+
+ -- Arcs with attributes defined elsewhere
+smime OBJECT IDENTIFIER ::= {pkcs-9 16}
+ -- Main arc for S/MIME (RFC 2633)
+certTypes OBJECT IDENTIFIER ::= {pkcs-9 22}
+ -- Main arc for certificate types defined in PKCS #12
+crlTypes OBJECT IDENTIFIER ::= {pkcs-9 23}
+ -- Main arc for crl types defined in PKCS #12
+
+ -- Other object identifiers
+id-at-pseudonym OBJECT IDENTIFIER ::= {id-at 65}
+
+-- Useful types
+
+PKCS9String {INTEGER : maxSize} ::= CHOICE {
+ ia5String IA5String (SIZE(1..maxSize)),
+ directoryString DirectoryString {maxSize}
+}
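+
+-- Illustrative only (editorial, not part of the original module): a
+-- value of this parameterized type can be written as, e.g.,
+--
+--   exampleString PKCS9String {pkcs-9-ub-pkcs9String} ::=
+--       ia5String : "example"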
+
+-- Object classes
+
+pkcsEntity OBJECT-CLASS ::= {
+ SUBCLASS OF { top }
+ KIND auxiliary
+ MAY CONTAIN { PKCSEntityAttributeSet }
+ ID pkcs-9-oc-pkcsEntity
+}
+
+naturalPerson OBJECT-CLASS ::= {
+ SUBCLASS OF { top }
+ KIND auxiliary
+ MAY CONTAIN { NaturalPersonAttributeSet }
+ ID pkcs-9-oc-naturalPerson
+}
+
+-- Attribute sets
+
+PKCSEntityAttributeSet ATTRIBUTE ::= {
+ pKCS7PDU |
+ userPKCS12 |
+-- pKCS15Token |
+ encryptedPrivateKeyInfo,
+ ... -- For future extensions
+}
+
+NaturalPersonAttributeSet ATTRIBUTE ::= {
+ emailAddress |
+ unstructuredName |
+ unstructuredAddress |
+ dateOfBirth |
+ placeOfBirth |
+ gender |
+ countryOfCitizenship |
+ countryOfResidence |
+ pseudonym |
+ serialNumber,
+ ... -- For future extensions
+}
+
+-- Attributes
+
+pKCS7PDU ATTRIBUTE ::= {
+ WITH SYNTAX ContentInfo
+ ID pkcs-9-at-pkcs7PDU
+}
+
+userPKCS12 ATTRIBUTE ::= {
+ WITH SYNTAX PFX
+ ID pkcs-9-at-userPKCS12
+}
+
+-- pKCS15Token ATTRIBUTE ::= {
+-- WITH SYNTAX PKCS15Token
+-- ID pkcs-9-at-pkcs15Token
+-- }
+
+encryptedPrivateKeyInfo ATTRIBUTE ::= {
+ WITH SYNTAX EncryptedPrivateKeyInfo
+ ID pkcs-9-at-encryptedPrivateKeyInfo
+}
+
+emailAddress ATTRIBUTE ::= {
+ WITH SYNTAX IA5String (SIZE(1..pkcs-9-ub-emailAddress))
+ EQUALITY MATCHING RULE pkcs9CaseIgnoreMatch
+ ID pkcs-9-at-emailAddress
+}
+
+unstructuredName ATTRIBUTE ::= {
+ WITH SYNTAX PKCS9String {pkcs-9-ub-unstructuredName}
+ EQUALITY MATCHING RULE pkcs9CaseIgnoreMatch
+ ID pkcs-9-at-unstructuredName
+}
+
+unstructuredAddress ATTRIBUTE ::= {
+ WITH SYNTAX DirectoryString {pkcs-9-ub-unstructuredAddress}
+ EQUALITY MATCHING RULE caseIgnoreMatch
+ ID pkcs-9-at-unstructuredAddress
+}
+
+dateOfBirth ATTRIBUTE ::= {
+ WITH SYNTAX GeneralizedTime
+ EQUALITY MATCHING RULE generalizedTimeMatch
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-dateOfBirth
+}
+
+placeOfBirth ATTRIBUTE ::= {
+ WITH SYNTAX DirectoryString {pkcs-9-ub-placeOfBirth}
+ EQUALITY MATCHING RULE caseExactMatch
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-placeOfBirth
+}
+
+gender ATTRIBUTE ::= {
+ WITH SYNTAX PrintableString (SIZE(1) ^ FROM ("M" | "F" | "m" | "f"))
+ EQUALITY MATCHING RULE caseIgnoreMatch
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-gender
+}
+
+countryOfCitizenship ATTRIBUTE ::= {
+ WITH SYNTAX PrintableString (SIZE(2))(CONSTRAINED BY {
+ -- Must be a two-letter country acronym in accordance with
+ -- ISO/IEC 3166 --})
+ EQUALITY MATCHING RULE caseIgnoreMatch
+ ID pkcs-9-at-countryOfCitizenship
+}
+
+countryOfResidence ATTRIBUTE ::= {
+ WITH SYNTAX PrintableString (SIZE(2))(CONSTRAINED BY {
+ -- Must be a two-letter country acronym in accordance with
+ -- ISO/IEC 3166 --})
+ EQUALITY MATCHING RULE caseIgnoreMatch
+ ID pkcs-9-at-countryOfResidence
+}
+
+pseudonym ATTRIBUTE ::= {
+ WITH SYNTAX DirectoryString {pkcs-9-ub-pseudonym}
+ EQUALITY MATCHING RULE caseExactMatch
+ ID id-at-pseudonym
+}
+
+contentType ATTRIBUTE ::= {
+ WITH SYNTAX ContentType
+ EQUALITY MATCHING RULE objectIdentifierMatch
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-contentType
+}
+
+ContentType ::= OBJECT IDENTIFIER
+
+messageDigest ATTRIBUTE ::= {
+ WITH SYNTAX MessageDigest
+ EQUALITY MATCHING RULE octetStringMatch
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-messageDigest
+}
+
+MessageDigest ::= OCTET STRING
+
+signingTime ATTRIBUTE ::= {
+ WITH SYNTAX SigningTime
+ EQUALITY MATCHING RULE signingTimeMatch
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-signingTime
+}
+
+SigningTime ::= Time -- imported from ISO/IEC 9594-8
+
+randomNonce ATTRIBUTE ::= {
+ WITH SYNTAX RandomNonce
+ EQUALITY MATCHING RULE octetStringMatch
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-randomNonce
+}
+
+RandomNonce ::= OCTET STRING (SIZE(4..MAX)) -- At least four bytes long
+
+sequenceNumber ATTRIBUTE ::= {
+ WITH SYNTAX SequenceNumber
+ EQUALITY MATCHING RULE integerMatch
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-sequenceNumber
+}
+
+SequenceNumber ::= INTEGER (1..MAX)
+
+counterSignature ATTRIBUTE ::= {
+ WITH SYNTAX SignerInfo
+ ID pkcs-9-at-counterSignature
+}
+
+challengePassword ATTRIBUTE ::= {
+ WITH SYNTAX DirectoryString {pkcs-9-ub-challengePassword}
+ EQUALITY MATCHING RULE caseExactMatch
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-challengePassword
+}
+
+extensionRequest ATTRIBUTE ::= {
+ WITH SYNTAX ExtensionRequest
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-extensionRequest
+}
+
+ExtensionRequest ::= Extensions
+
+extendedCertificateAttributes ATTRIBUTE ::= {
+ WITH SYNTAX SET OF Attribute
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-extendedCertificateAttributes
+}
+
+friendlyName ATTRIBUTE ::= {
+ WITH SYNTAX BMPString (SIZE(1..pkcs-9-ub-friendlyName))
+ EQUALITY MATCHING RULE caseIgnoreMatch
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-friendlyName
+}
+
+localKeyId ATTRIBUTE ::= {
+ WITH SYNTAX OCTET STRING
+ EQUALITY MATCHING RULE octetStringMatch
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-localKeyId
+}
+
+signingDescription ATTRIBUTE ::= {
+ WITH SYNTAX DirectoryString {pkcs-9-ub-signingDescription}
+ EQUALITY MATCHING RULE caseIgnoreMatch
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-signingDescription
+}
+
+smimeCapabilities ATTRIBUTE ::= {
+ WITH SYNTAX SMIMECapabilities
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-smimeCapabilities
+}
+
+SMIMECapabilities ::= SEQUENCE OF SMIMECapability
+
+SMIMECapability ::= SEQUENCE {
+ algorithm ALGORITHM.&id ({SMIMEv3Algorithms}),
+ parameters ALGORITHM.&Type ({SMIMEv3Algorithms}{@algorithm})
+}
+
+SMIMEv3Algorithms ALGORITHM ::= {...-- See RFC 2633 --}
+
+ -- Matching rules
+
+pkcs9CaseIgnoreMatch MATCHING-RULE ::= {
+ SYNTAX PKCS9String {pkcs-9-ub-match}
+ ID pkcs-9-mr-caseIgnoreMatch
+}
+
+signingTimeMatch MATCHING-RULE ::= {
+ SYNTAX SigningTime
+ ID pkcs-9-mr-signingTimeMatch
+}
+
+END
\ No newline at end of file
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/PKCS7BodyPartType.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS7BodyPartType.asn1
index 525ee3c5ec..1bcc2281a1 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/PKCS7BodyPartType.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS7BodyPartType.asn1
@@ -6,7 +6,7 @@ BEGIN
IMPORTS
-- PKCS#7
ContentInfo
- FROM PKCS7 {iso(1) member-body(2) usa(840) rsadsi(113549) pkcs(1)
+ FROM PKCS-7 {iso(1) member-body(2) usa(840) rsadsi(113549) pkcs(1)
7 module(0)}
-- module not formally defined in the PKCS#7document, therefore defined in Annex O
-- IPMS Information Objects
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX-CommonTypes-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX-CommonTypes-2009.asn1
new file mode 100644
index 0000000000..fde5bddbf3
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX-CommonTypes-2009.asn1
@@ -0,0 +1,166 @@
+ PKIX-CommonTypes-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57)}
+
+ DEFINITIONS EXPLICIT TAGS ::=
+ BEGIN
+
+ -- ATTRIBUTE
+ --
+ -- Describe the set of data associated with an attribute of some type
+ --
+ -- &id is an OID identifying the attribute
+ -- &Type is the ASN.1 type structure for the attribute; not all
+ -- attributes have a data structure, so this field is optional
+ -- &minCount contains the minimum number of times the attribute can
+ -- occur in an AttributeSet
+ -- &maxCount contains the maximum number of times the attribute can
+ -- appear in an AttributeSet
+ -- Note: this cannot be automatically enforced as the field
+ -- cannot be defaulted to MAX.
+ -- &equality-match contains information about how matching should be
+ -- done
+ --
+ -- Currently we are using two different prefixes for attributes.
+ --
+ -- at- for certificate attributes
+ -- aa- for CMS attributes
+ --
+
+ ATTRIBUTE ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &Type OPTIONAL,
+ &equality-match MATCHING-RULE OPTIONAL,
+ &minCount INTEGER DEFAULT 1,
+ &maxCount INTEGER OPTIONAL
+ } WITH SYNTAX {
+ [TYPE &Type]
+ [EQUALITY MATCHING RULE &equality-match]
+ [COUNTS [MIN &minCount] [MAX &maxCount]]
+ IDENTIFIED BY &id
+ }
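+
+ -- Illustrative only (editorial, not part of the original module):
+ -- an attribute object defined with the syntax above looks like
+ --
+ --   at-name ATTRIBUTE ::= { TYPE X520name IDENTIFIED BY id-at-name }
+ --
+ -- (this particular object is defined in PKIX1Explicit-2009)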
+
+ -- Specification of MATCHING-RULE information object class
+ --
+
+ MATCHING-RULE ::= CLASS {
+ &ParentMatchingRules MATCHING-RULE OPTIONAL,
+ &AssertionType OPTIONAL,
+ &uniqueMatchIndicator ATTRIBUTE OPTIONAL,
+ &id OBJECT IDENTIFIER UNIQUE
+ }
+ WITH SYNTAX {
+ [PARENT &ParentMatchingRules]
+ [SYNTAX &AssertionType]
+ [UNIQUE-MATCH-INDICATOR &uniqueMatchIndicator]
+ ID &id
+ }
+
+ -- AttributeSet
+ --
+ -- Used when a set of attributes is to occur.
+ --
+ -- type contains the identifier of the attribute
+ -- values contains a set of values where the structure of the ASN.1
+ -- is defined by the attribute
+ --
+ -- The parameter contains the set of objects describing
+ -- those attributes that can occur in this location.
+ --
+
+ AttributeSet{ATTRIBUTE:AttrSet} ::= SEQUENCE {
+ type ATTRIBUTE.&id({AttrSet}),
+ values SET SIZE (1..MAX) OF ATTRIBUTE.
+ &Type({AttrSet}{@type})
+ }
+
+ -- SingleAttribute
+ --
+ -- Used for a single valued attribute
+ --
+ -- The parameter contains the set of objects describing the
+ -- attributes that can occur in this location
+ --
+
+ SingleAttribute{ATTRIBUTE:AttrSet} ::= SEQUENCE {
+ type ATTRIBUTE.&id({AttrSet}),
+ value ATTRIBUTE.&Type({AttrSet}{@type})
+ }
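+
+ -- Note (editorial, not part of the original module): SingleAttribute
+ -- is used, for instance, by RelativeDistinguishedName in
+ -- PKIX1Explicit-2009.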
+
+ -- EXTENSION
+ --
+ -- This class definition is used to describe the association of
+ -- object identifier and ASN.1 type structure for extensions
+ --
+ -- All extensions are prefixed with ext-
+ --
+ -- &id contains the object identifier for the extension
+ -- &ExtnType specifies the ASN.1 type structure for the extension
+ -- &Critical contains the set of legal values for the critical field.
+ -- This is normally {TRUE|FALSE} but in some instances may be
+ -- restricted to just one of these values.
+ --
+
+ EXTENSION ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &ExtnType,
+ &Critical BOOLEAN DEFAULT {TRUE | FALSE }
+ } WITH SYNTAX {
+ SYNTAX &ExtnType IDENTIFIED BY &id
+ [CRITICALITY &Critical]
+ }
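+
+ -- Illustrative only (editorial, not part of the original module):
+ -- an extension object defined with the syntax above looks like
+ --
+ --   ext-KeyUsage EXTENSION ::= { SYNTAX KeyUsage
+ --                                IDENTIFIED BY id-ce-keyUsage }
+ --
+ -- (this particular object is defined in PKIX1Implicit-2009)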
+
+ -- Extensions
+ --
+ -- Used for a sequence of extensions.
+ --
+ -- The parameter contains the set of legal extensions that can
+ -- occur in this sequence.
+ --
+
+ Extensions{EXTENSION:ExtensionSet} ::=
+ SEQUENCE SIZE (1..MAX) OF Extension{{ExtensionSet}}
+
+ -- Extension
+ --
+ -- Used for a single extension
+ --
+ -- The parameter contains the set of legal extensions that can
+ -- occur in this extension.
+ --
+ -- The restriction on the critical field has been commented out, as
+ -- the authors are not completely sure it is correct.
+ -- The restriction could be done using custom code rather than
+ -- compiler-generated code, however.
+ --
+
+ Extension{EXTENSION:ExtensionSet} ::= SEQUENCE {
+ extnID EXTENSION.&id({ExtensionSet}),
+ critical BOOLEAN
+ -- (EXTENSION.&Critical({ExtensionSet}{@extnID}))
+ DEFAULT FALSE,
+ extnValue OCTET STRING (CONTAINING
+ EXTENSION.&ExtnType({ExtensionSet}{@extnID}))
+ -- contains the DER encoding of the ASN.1 value
+ -- corresponding to the extension type identified
+ -- by extnID
+ }
+
+ -- Security Category
+ --
+ -- Security categories are used both for specifying clearances and
+ -- for labeling objects. We move this here from RFC 3281 so that
+ -- they will use a common single object class to express this
+ -- information.
+ --
+
+ SECURITY-CATEGORY ::= TYPE-IDENTIFIER
+
+ SecurityCategory{SECURITY-CATEGORY:Supported} ::= SEQUENCE {
+ type [0] IMPLICIT SECURITY-CATEGORY.
+ &id({Supported}),
+ value [1] EXPLICIT SECURITY-CATEGORY.
+ &Type({Supported}{@type})
+ }
+
+ END
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX-X400Address-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX-X400Address-2009.asn1
new file mode 100644
index 0000000000..41cbaea67e
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX-X400Address-2009.asn1
@@ -0,0 +1,300 @@
+ --
+ -- This module is used to isolate all the X.400 naming information.
+ -- There is no reason to expect this to occur in a PKIX certificate.
+ --
+
+ PKIX-X400Address-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-x400address-02(60) }
+ DEFINITIONS EXPLICIT TAGS ::=
+ BEGIN
+
+ -- X.400 address syntax starts here
+
+ ORAddress ::= SEQUENCE {
+ built-in-standard-attributes BuiltInStandardAttributes,
+ built-in-domain-defined-attributes
+ BuiltInDomainDefinedAttributes OPTIONAL,
+
+ -- see also teletex-domain-defined-attributes
+ extension-attributes ExtensionAttributes OPTIONAL }
+
+ -- Built-in Standard Attributes
+
+ BuiltInStandardAttributes ::= SEQUENCE {
+ country-name CountryName OPTIONAL,
+ administration-domain-name AdministrationDomainName OPTIONAL,
+ network-address [0] IMPLICIT NetworkAddress OPTIONAL,
+ -- see also extended-network-address
+ terminal-identifier [1] IMPLICIT TerminalIdentifier OPTIONAL,
+ private-domain-name [2] PrivateDomainName OPTIONAL,
+ organization-name [3] IMPLICIT OrganizationName OPTIONAL,
+ -- see also teletex-organization-name
+ numeric-user-identifier [4] IMPLICIT NumericUserIdentifier
+ OPTIONAL,
+ personal-name [5] IMPLICIT PersonalName OPTIONAL,
+ -- see also teletex-personal-name
+ organizational-unit-names [6] IMPLICIT OrganizationalUnitNames
+ OPTIONAL }
+ -- see also teletex-organizational-unit-names
+
+ CountryName ::= [APPLICATION 1] CHOICE {
+ x121-dcc-code NumericString
+ (SIZE (ub-country-name-numeric-length)),
+ iso-3166-alpha2-code PrintableString
+ (SIZE (ub-country-name-alpha-length)) }
+
+ AdministrationDomainName ::= [APPLICATION 2] CHOICE {
+ numeric NumericString (SIZE (0..ub-domain-name-length)),
+ printable PrintableString (SIZE (0..ub-domain-name-length)) }
+
+ NetworkAddress ::= X121Address -- see also extended-network-address
+
+ X121Address ::= NumericString (SIZE (1..ub-x121-address-length))
+
+ TerminalIdentifier ::= PrintableString (SIZE
+ (1..ub-terminal-id-length))
+
+ PrivateDomainName ::= CHOICE {
+ numeric NumericString (SIZE (1..ub-domain-name-length)),
+ printable PrintableString (SIZE (1..ub-domain-name-length)) }
+
+ OrganizationName ::= PrintableString
+ (SIZE (1..ub-organization-name-length))
+ -- see also teletex-organization-name
+
+ NumericUserIdentifier ::= NumericString
+ (SIZE (1..ub-numeric-user-id-length))
+
+ PersonalName ::= SET {
+ surname [0] IMPLICIT PrintableString
+ (SIZE (1..ub-surname-length)),
+ given-name [1] IMPLICIT PrintableString
+ (SIZE (1..ub-given-name-length)) OPTIONAL,
+ initials [2] IMPLICIT PrintableString
+ (SIZE (1..ub-initials-length)) OPTIONAL,
+ generation-qualifier [3] IMPLICIT PrintableString
+ (SIZE (1..ub-generation-qualifier-length))
+ OPTIONAL }
+ -- see also teletex-personal-name
+
+ OrganizationalUnitNames ::= SEQUENCE SIZE (1..ub-organizational-units)
+ OF OrganizationalUnitName
+ -- see also teletex-organizational-unit-names
+
+ OrganizationalUnitName ::= PrintableString (SIZE
+ (1..ub-organizational-unit-name-length))
+
+ -- Built-in Domain-defined Attributes
+
+ BuiltInDomainDefinedAttributes ::= SEQUENCE SIZE
+ (1..ub-domain-defined-attributes) OF
+ BuiltInDomainDefinedAttribute
+
+ BuiltInDomainDefinedAttribute ::= SEQUENCE {
+ type PrintableString (SIZE
+ (1..ub-domain-defined-attribute-type-length)),
+ value PrintableString (SIZE
+ (1..ub-domain-defined-attribute-value-length)) }
+
+ -- Extension Attributes
+
+ ExtensionAttributes ::= SET SIZE (1..ub-extension-attributes) OF
+ ExtensionAttribute
+
+ EXTENSION-ATTRIBUTE ::= CLASS {
+ &id INTEGER (0..ub-extension-attributes) UNIQUE,
+ &Type
+ } WITH SYNTAX { &Type IDENTIFIED BY &id }
+
+ ExtensionAttribute ::= SEQUENCE {
+ extension-attribute-type [0] IMPLICIT EXTENSION-ATTRIBUTE.
+ &id({SupportedExtensionAttributes}),
+ extension-attribute-value [1] EXTENSION-ATTRIBUTE.
+ &Type({SupportedExtensionAttributes}
+ {@extension-attribute-type})}
+
+ SupportedExtensionAttributes EXTENSION-ATTRIBUTE ::= {
+ ea-commonName | ea-teletexCommonName | ea-teletexOrganizationName
+ | ea-teletexPersonalName | ea-teletexOrganizationalUnitNames |
+ ea-pDSName | ea-physicalDeliveryCountryName | ea-postalCode |
+ ea-physicalDeliveryOfficeName | ea-physicalDeliveryOfficeNumber |
+ ea-extensionORAddressComponents | ea-physicalDeliveryPersonalName
+ | ea-physicalDeliveryOrganizationName |
+ ea-extensionPhysicalDeliveryAddressComponents |
+ ea-unformattedPostalAddress | ea-streetAddress |
+ ea-postOfficeBoxAddress | ea-posteRestanteAddress |
+ ea-uniquePostalName | ea-localPostalAttributes |
+ ea-extendedNetworkAddress | ea-terminalType |
+ ea-teletexDomainDefinedAttributes, ... }
+
+ -- Extension types and attribute values
+
+ ea-commonName EXTENSION-ATTRIBUTE ::= { PrintableString
+ (SIZE (1..ub-common-name-length)) IDENTIFIED BY 1 }
+
+ ea-teletexCommonName EXTENSION-ATTRIBUTE ::= {TeletexString
+ (SIZE (1..ub-common-name-length)) IDENTIFIED BY 2 }
+
+ ea-teletexOrganizationName EXTENSION-ATTRIBUTE ::= { TeletexString
+ (SIZE (1..ub-organization-name-length)) IDENTIFIED BY 3 }
+
+ ea-teletexPersonalName EXTENSION-ATTRIBUTE ::= {SET {
+ surname [0] IMPLICIT TeletexString
+ (SIZE (1..ub-surname-length)),
+ given-name [1] IMPLICIT TeletexString
+ (SIZE (1..ub-given-name-length)) OPTIONAL,
+ initials [2] IMPLICIT TeletexString
+ (SIZE (1..ub-initials-length)) OPTIONAL,
+ generation-qualifier [3] IMPLICIT TeletexString
+ (SIZE (1..ub-generation-qualifier-length))
+ OPTIONAL } IDENTIFIED BY 4 }
+
+ ea-teletexOrganizationalUnitNames EXTENSION-ATTRIBUTE ::=
+ { SEQUENCE SIZE (1..ub-organizational-units) OF
+ TeletexOrganizationalUnitName IDENTIFIED BY 5 }
+
+ TeletexOrganizationalUnitName ::= TeletexString
+ (SIZE (1..ub-organizational-unit-name-length))
+
+ ea-pDSName EXTENSION-ATTRIBUTE ::= {PrintableString
+ (SIZE (1..ub-pds-name-length)) IDENTIFIED BY 7 }
+
+ ea-physicalDeliveryCountryName EXTENSION-ATTRIBUTE ::= { CHOICE {
+ x121-dcc-code NumericString (SIZE
+ (ub-country-name-numeric-length)),
+ iso-3166-alpha2-code PrintableString
+ (SIZE (ub-country-name-alpha-length)) }
+ IDENTIFIED BY 8 }
+
+ ea-postalCode EXTENSION-ATTRIBUTE ::= { CHOICE {
+ numeric-code NumericString (SIZE (1..ub-postal-code-length)),
+ printable-code PrintableString (SIZE (1..ub-postal-code-length)) }
+ IDENTIFIED BY 9 }
+
+ ea-physicalDeliveryOfficeName EXTENSION-ATTRIBUTE ::=
+ { PDSParameter IDENTIFIED BY 10 }
+
+ ea-physicalDeliveryOfficeNumber EXTENSION-ATTRIBUTE ::=
+ {PDSParameter IDENTIFIED BY 11 }
+
+ ea-extensionORAddressComponents EXTENSION-ATTRIBUTE ::=
+ {PDSParameter IDENTIFIED BY 12 }
+
+ ea-physicalDeliveryPersonalName EXTENSION-ATTRIBUTE ::=
+ {PDSParameter IDENTIFIED BY 13}
+
+ ea-physicalDeliveryOrganizationName EXTENSION-ATTRIBUTE ::=
+ {PDSParameter IDENTIFIED BY 14 }
+
+ ea-extensionPhysicalDeliveryAddressComponents EXTENSION-ATTRIBUTE ::=
+ {PDSParameter IDENTIFIED BY 15 }
+
+ ea-unformattedPostalAddress EXTENSION-ATTRIBUTE ::= { SET {
+ printable-address SEQUENCE SIZE (1..ub-pds-physical-address-lines)
+ OF PrintableString (SIZE (1..ub-pds-parameter-length))
+ OPTIONAL,
+ teletex-string TeletexString
+ (SIZE (1..ub-unformatted-address-length)) OPTIONAL }
+ IDENTIFIED BY 16 }
+
+ ea-streetAddress EXTENSION-ATTRIBUTE ::=
+ {PDSParameter IDENTIFIED BY 17 }
+
+ ea-postOfficeBoxAddress EXTENSION-ATTRIBUTE ::=
+ {PDSParameter IDENTIFIED BY 18 }
+
+ ea-posteRestanteAddress EXTENSION-ATTRIBUTE ::=
+ {PDSParameter IDENTIFIED BY 19 }
+
+ ea-uniquePostalName EXTENSION-ATTRIBUTE ::=
+ { PDSParameter IDENTIFIED BY 20 }
+
+ ea-localPostalAttributes EXTENSION-ATTRIBUTE ::=
+ {PDSParameter IDENTIFIED BY 21 }
+ PDSParameter ::= SET {
+ printable-string PrintableString
+ (SIZE(1..ub-pds-parameter-length)) OPTIONAL,
+ teletex-string TeletexString
+ (SIZE(1..ub-pds-parameter-length)) OPTIONAL }
+
+ ea-extendedNetworkAddress EXTENSION-ATTRIBUTE ::= {
+ CHOICE {
+ e163-4-address SEQUENCE {
+ number [0] IMPLICIT NumericString
+ (SIZE (1..ub-e163-4-number-length)),
+ sub-address [1] IMPLICIT NumericString
+ (SIZE (1..ub-e163-4-sub-address-length)) OPTIONAL
+ },
+ psap-address [0] IMPLICIT PresentationAddress
+ } IDENTIFIED BY 22
+ }
+
+ PresentationAddress ::= SEQUENCE {
+ pSelector [0] EXPLICIT OCTET STRING OPTIONAL,
+ sSelector [1] EXPLICIT OCTET STRING OPTIONAL,
+ tSelector [2] EXPLICIT OCTET STRING OPTIONAL,
+ nAddresses [3] EXPLICIT SET SIZE (1..MAX) OF OCTET STRING }
+
+ ea-terminalType EXTENSION-ATTRIBUTE ::= {INTEGER {
+ telex (3),
+ teletex (4),
+ g3-facsimile (5),
+ g4-facsimile (6),
+ ia5-terminal (7),
+ videotex (8) } (0..ub-integer-options)
+ IDENTIFIED BY 23 }
+
+ -- Extension Domain-defined Attributes
+
+ ea-teletexDomainDefinedAttributes EXTENSION-ATTRIBUTE ::=
+ { SEQUENCE SIZE (1..ub-domain-defined-attributes) OF
+ TeletexDomainDefinedAttribute IDENTIFIED BY 6 }
+
+ TeletexDomainDefinedAttribute ::= SEQUENCE {
+ type TeletexString
+ (SIZE (1..ub-domain-defined-attribute-type-length)),
+ value TeletexString
+ (SIZE (1..ub-domain-defined-attribute-value-length)) }
+
+ -- specifications of Upper Bounds MUST be regarded as mandatory
+ -- from Annex B of ITU-T X.411 Reference Definition of MTS Parameter
+ -- Upper Bounds
+ -- Upper Bounds
+ ub-match INTEGER ::= 128
+ ub-common-name-length INTEGER ::= 64
+ ub-country-name-alpha-length INTEGER ::= 2
+ ub-country-name-numeric-length INTEGER ::= 3
+ ub-domain-defined-attributes INTEGER ::= 4
+ ub-domain-defined-attribute-type-length INTEGER ::= 8
+ ub-domain-defined-attribute-value-length INTEGER ::= 128
+ ub-domain-name-length INTEGER ::= 16
+ ub-extension-attributes INTEGER ::= 256
+ ub-e163-4-number-length INTEGER ::= 15
+ ub-e163-4-sub-address-length INTEGER ::= 40
+ ub-generation-qualifier-length INTEGER ::= 3
+ ub-given-name-length INTEGER ::= 16
+ ub-initials-length INTEGER ::= 5
+ ub-integer-options INTEGER ::= 256
+ ub-numeric-user-id-length INTEGER ::= 32
+ ub-organization-name-length INTEGER ::= 64
+ ub-organizational-unit-name-length INTEGER ::= 32
+ ub-organizational-units INTEGER ::= 4
+ ub-pds-name-length INTEGER ::= 16
+ ub-pds-parameter-length INTEGER ::= 30
+ ub-pds-physical-address-lines INTEGER ::= 6
+ ub-postal-code-length INTEGER ::= 16
+ ub-surname-length INTEGER ::= 40
+ ub-terminal-id-length INTEGER ::= 24
+ ub-unformatted-address-length INTEGER ::= 180
+ ub-x121-address-length INTEGER ::= 16
+
+ -- Note - upper bounds on string types, such as TeletexString, are
+ -- measured in characters. Excepting PrintableString or IA5String, a
+ -- significantly greater number of octets will be required to hold
+ -- such a value. As a minimum, 16 octets or twice the specified
+ -- upper bound, whichever is the larger, should be allowed for
+ -- TeletexString. For UTF8String or UniversalString, at least four
+ -- times the upper bound should be allowed.
+
+ END
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1-PSS-OAEP-Algorithms-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1-PSS-OAEP-Algorithms-2009.asn1
new file mode 100644
index 0000000000..b1232fb8f2
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1-PSS-OAEP-Algorithms-2009.asn1
@@ -0,0 +1,308 @@
+ PKIX1-PSS-OAEP-Algorithms-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-rsa-pkalgs-02(54)}
+ DEFINITIONS EXPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ AlgorithmIdentifier{}, ALGORITHM, DIGEST-ALGORITHM, KEY-TRANSPORT,
+ SIGNATURE-ALGORITHM, PUBLIC-KEY, SMIME-CAPS
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ id-sha1, mda-sha1, pk-rsa, RSAPublicKey
+ FROM PKIXAlgs-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-algorithms2008-02(56)};
+
+ -- ============================
+ -- Object Set exports
+ -- ============================
+ --
+ -- Define top-level symbols with all of the objects defined for
+ -- export to other modules. These objects would be included as part
+ -- of an Object Set to restrict the set of legal values.
+ --
+
+ PublicKeys PUBLIC-KEY ::= { pk-rsaSSA-PSS | pk-rsaES-OAEP, ... }
+ SignatureAlgs SIGNATURE-ALGORITHM ::= { sa-rsaSSA-PSS, ...}
+ KeyTransportAlgs KEY-TRANSPORT ::= { kta-rsaES-OAEP, ... }
+ HashAlgs DIGEST-ALGORITHM ::= { mda-sha224 | mda-sha256 | mda-sha384
+ | mda-sha512, ... }
+ SMimeCaps SMIME-CAPS ::= {
+ sa-rsaSSA-PSS.&smimeCaps |
+ kta-rsaES-OAEP.&smimeCaps,
+ ...
+ }
+
+ -- =============================
+ -- Algorithm Objects
+ -- =============================
+
+ --
+ -- Public key object for PSS signatures
+ --
+
+ pk-rsaSSA-PSS PUBLIC-KEY ::= {
+ IDENTIFIER id-RSASSA-PSS
+ KEY RSAPublicKey
+ PARAMS TYPE RSASSA-PSS-params ARE optional
+ -- Private key format not in this module --
+ CERT-KEY-USAGE { nonRepudiation, digitalSignature,
+ keyCertSign, cRLSign }
+ }
+
+ --
+ -- Signature algorithm definition for PSS signatures
+ --
+
+ sa-rsaSSA-PSS SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER id-RSASSA-PSS
+ PARAMS TYPE RSASSA-PSS-params ARE required
+ HASHES { mda-sha1 | mda-sha224 | mda-sha256 | mda-sha384
+ | mda-sha512 }
+ PUBLIC-KEYS { pk-rsa | pk-rsaSSA-PSS }
+ SMIME-CAPS { IDENTIFIED BY id-RSASSA-PSS }
+ }
+
+ --
+ -- Signature algorithm definitions for PKCS v1.5 signatures
+ --
+
+ sa-sha224WithRSAEncryption SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER sha224WithRSAEncryption
+ PARAMS TYPE NULL ARE required
+ HASHES { mda-sha224 }
+ PUBLIC-KEYS { pk-rsa }
+ SMIME-CAPS { IDENTIFIED BY sha224WithRSAEncryption }
+ }
+ sha224WithRSAEncryption OBJECT IDENTIFIER ::= { pkcs-1 14 }
+
+ sa-sha256WithRSAEncryption SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER sha256WithRSAEncryption
+ PARAMS TYPE NULL ARE required
+ HASHES { mda-sha256 }
+ PUBLIC-KEYS { pk-rsa }
+ SMIME-CAPS { IDENTIFIED BY sha256WithRSAEncryption }
+ }
+ sha256WithRSAEncryption OBJECT IDENTIFIER ::= { pkcs-1 11 }
+
+ sa-sha384WithRSAEncryption SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER sha384WithRSAEncryption
+ PARAMS TYPE NULL ARE required
+ HASHES { mda-sha384 }
+ PUBLIC-KEYS { pk-rsa }
+ SMIME-CAPS { IDENTIFIED BY sha384WithRSAEncryption }
+ }
+ sha384WithRSAEncryption OBJECT IDENTIFIER ::= { pkcs-1 12 }
+
+ sa-sha512WithRSAEncryption SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER sha512WithRSAEncryption
+ PARAMS TYPE NULL ARE required
+ HASHES { mda-sha512 }
+ PUBLIC-KEYS { pk-rsa }
+ SMIME-CAPS { IDENTIFIED BY sha512WithRSAEncryption }
+ }
+ sha512WithRSAEncryption OBJECT IDENTIFIER ::= { pkcs-1 13 }
+
+ --
+ -- Public key definition for OAEP encryption
+ --
+
+ pk-rsaES-OAEP PUBLIC-KEY ::= {
+ IDENTIFIER id-RSAES-OAEP
+ KEY RSAPublicKey
+ PARAMS TYPE RSAES-OAEP-params ARE optional
+ -- Private key format not in this module --
+ CERT-KEY-USAGE {keyEncipherment, dataEncipherment}
+ }
+
+ --
+ -- Key transport key lock definition for OAEP encryption
+ --
+
+ kta-rsaES-OAEP KEY-TRANSPORT ::= {
+ IDENTIFIER id-RSAES-OAEP
+ PARAMS TYPE RSAES-OAEP-params ARE required
+ PUBLIC-KEYS { pk-rsa | pk-rsaES-OAEP }
+ SMIME-CAPS { TYPE RSAES-OAEP-params IDENTIFIED BY id-RSAES-OAEP}
+ }
+ -- ============================
+ -- Basic object identifiers
+ -- ============================
+
+ pkcs-1 OBJECT IDENTIFIER ::=
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) 1 }
+
+ -- When rsaEncryption is used in an AlgorithmIdentifier, the
+ -- parameters MUST be present and MUST be NULL.
+ -- rsaEncryption OBJECT IDENTIFIER ::= { pkcs-1 1 }
+
+ -- When id-RSAES-OAEP is used in an AlgorithmIdentifier,
+ -- and the parameters field is present, it MUST be
+ -- RSAES-OAEP-params.
+
+ id-RSAES-OAEP OBJECT IDENTIFIER ::= { pkcs-1 7 }
+
+ -- When id-mgf1 is used in an AlgorithmIdentifier, the parameters
+ -- MUST be present and MUST be a HashAlgorithm.
+
+ id-mgf1 OBJECT IDENTIFIER ::= { pkcs-1 8 }
+
+ -- When id-pSpecified is used in an AlgorithmIdentifier, the
+ -- parameters MUST be an OCTET STRING.
+
+ id-pSpecified OBJECT IDENTIFIER ::= { pkcs-1 9 }
+
+ -- When id-RSASSA-PSS is used in an AlgorithmIdentifier, and the
+ -- parameters field is present, it MUST be RSASSA-PSS-params.
+
+ id-RSASSA-PSS OBJECT IDENTIFIER ::= { pkcs-1 10 }
+
+ -- When the following OIDs are used in an AlgorithmIdentifier, the
+ -- parameters SHOULD be absent, but if the parameters are present,
+ -- they MUST be NULL.
+
+ --
+ -- id-sha1 is imported from RFC 3279. Additionally, the v1.5
+ -- signature algorithms (i.e., rsaWithSHA256) are now solely placed
+ -- in that module.
+ --
+
+ id-sha224 OBJECT IDENTIFIER ::=
+ { joint-iso-itu-t(2) country(16) us(840) organization(1) gov(101)
+ csor(3) nistAlgorithms(4) hashalgs(2) 4 }
+
+ mda-sha224 DIGEST-ALGORITHM ::= {
+ IDENTIFIER id-sha224
+ PARAMS TYPE NULL ARE preferredAbsent
+ }
+
+ id-sha256 OBJECT IDENTIFIER ::=
+ { joint-iso-itu-t(2) country(16) us(840) organization(1) gov(101)
+ csor(3) nistAlgorithms(4) hashalgs(2) 1 }
+
+ mda-sha256 DIGEST-ALGORITHM ::= {
+ IDENTIFIER id-sha256
+ PARAMS TYPE NULL ARE preferredAbsent
+ }
+ id-sha384 OBJECT IDENTIFIER ::=
+ { joint-iso-itu-t(2) country(16) us(840) organization(1) gov(101)
+ csor(3) nistAlgorithms(4) hashalgs(2) 2 }
+
+ mda-sha384 DIGEST-ALGORITHM ::= {
+ IDENTIFIER id-sha384
+ PARAMS TYPE NULL ARE preferredAbsent
+ }
+ id-sha512 OBJECT IDENTIFIER ::=
+ { joint-iso-itu-t(2) country(16) us(840) organization(1) gov(101)
+ csor(3) nistAlgorithms(4) hashalgs(2) 3 }
+
+ mda-sha512 DIGEST-ALGORITHM ::= {
+ IDENTIFIER id-sha512
+ PARAMS TYPE NULL ARE preferredAbsent
+ }
+
+ -- =============
+ -- Constants
+ -- =============
+
+ EncodingParameters ::= OCTET STRING(SIZE(0..MAX))
+
+ nullOctetString EncodingParameters ::= ''H
+
+ nullParameters NULL ::= NULL
+
+ -- =========================
+ -- Algorithm Identifiers
+ -- =========================
+
+ HashAlgorithm ::= AlgorithmIdentifier{DIGEST-ALGORITHM,
+ {HashAlgorithms}}
+
+ HashAlgorithms DIGEST-ALGORITHM ::= {
+ { IDENTIFIER id-sha1 PARAMS TYPE NULL ARE preferredPresent } |
+ { IDENTIFIER id-sha224 PARAMS TYPE NULL ARE preferredPresent } |
+ { IDENTIFIER id-sha256 PARAMS TYPE NULL ARE preferredPresent } |
+ { IDENTIFIER id-sha384 PARAMS TYPE NULL ARE preferredPresent } |
+ { IDENTIFIER id-sha512 PARAMS TYPE NULL ARE preferredPresent }
+ }
+
+ sha1Identifier HashAlgorithm ::= {
+ algorithm id-sha1,
+ parameters NULL : NULL
+ }
+
+ --
+ -- We have a default algorithm - create the value here
+ --
+
+ MaskGenAlgorithm ::= AlgorithmIdentifier{ALGORITHM,
+ {PKCS1MGFAlgorithms}}
+
+ mgf1SHA1 MaskGenAlgorithm ::= {
+ algorithm id-mgf1,
+ parameters HashAlgorithm : sha1Identifier
+ }
+
+ --
+ -- Define the set of mask generation functions
+ --
+ -- If the identifier is id-mgf1, any of the listed hash
+ -- algorithms may be used.
+ --
+
+ PKCS1MGFAlgorithms ALGORITHM ::= {
+ { IDENTIFIER id-mgf1 PARAMS TYPE HashAlgorithm ARE required },
+ ...
+ }
+
+ --
+ -- Define the set of known source algorithms for PSS
+ --
+
+ PSourceAlgorithm ::= AlgorithmIdentifier{ALGORITHM,
+ {PSS-SourceAlgorithms}}
+
+ PSS-SourceAlgorithms ALGORITHM ::= {
+ { IDENTIFIER id-pSpecified PARAMS TYPE EncodingParameters
+ ARE required },
+ ...
+ }
+ pSpecifiedEmpty PSourceAlgorithm ::= {
+ algorithm id-pSpecified,
+ parameters EncodingParameters : nullOctetString
+ }
+
+ -- ===================
+ -- Main structures
+ -- ===================
+
+ -- AlgorithmIdentifier parameters for id-RSASSA-PSS.
+ -- Note that the tags in this Sequence are explicit.
+ -- Note: The hash algorithm in hashAlgorithm and in
+ -- maskGenAlgorithm should be the same.
+
+ RSASSA-PSS-params ::= SEQUENCE {
+ hashAlgorithm [0] HashAlgorithm DEFAULT sha1Identifier,
+ maskGenAlgorithm [1] MaskGenAlgorithm DEFAULT mgf1SHA1,
+ saltLength [2] INTEGER DEFAULT 20,
+ trailerField [3] INTEGER DEFAULT 1
+ }
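+
+ -- Illustrative only (editorial, not part of the original module): a
+ -- parameters value that keeps all of the DEFAULTs above (SHA-1,
+ -- MGF1 with SHA-1, salt length 20, trailer field 1) can be written
+ -- as
+ --
+ --   defaultPSSParams RSASSA-PSS-params ::= {}
+ --
+ -- and, under DER, is encoded as an empty SEQUENCE because
+ -- components equal to their DEFAULT are omitted.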
+
+ -- AlgorithmIdentifier parameters for id-RSAES-OAEP.
+ -- Note that the tags in this Sequence are explicit.
+ -- Note: The hash algorithm in hashFunc and in
+ -- maskGenFunc should be the same.
+
+ RSAES-OAEP-params ::= SEQUENCE {
+ hashFunc [0] HashAlgorithm DEFAULT sha1Identifier,
+ maskGenFunc [1] MaskGenAlgorithm DEFAULT mgf1SHA1,
+ pSourceFunc [2] PSourceAlgorithm DEFAULT
+ pSpecifiedEmpty
+ }
+
+ END
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1Explicit-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1Explicit-2009.asn1
new file mode 100644
index 0000000000..613e0e9d2c
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1Explicit-2009.asn1
@@ -0,0 +1,415 @@
+ PKIX1Explicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1)
+ security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-explicit-02(51)}
+ DEFINITIONS EXPLICIT TAGS ::=
+ BEGIN
+
+ IMPORTS
+
+ Extensions{}, EXTENSION, ATTRIBUTE, SingleAttribute{}
+ FROM PKIX-CommonTypes-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57)}
+
+ AlgorithmIdentifier{}, PUBLIC-KEY, SIGNATURE-ALGORITHM
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ CertExtensions, CrlExtensions, CrlEntryExtensions
+ FROM PKIX1Implicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-implicit-02(59)}
+ SignatureAlgs, PublicKeys
+ FROM PKIXAlgs-2009
+ {iso(1) identified-organization(3) dod(6)
+ internet(1) security(5) mechanisms(5) pkix(7) id-mod(0) 56}
+
+ SignatureAlgs, PublicKeys
+ FROM PKIX1-PSS-OAEP-Algorithms-2009
+ {iso(1) identified-organization(3) dod(6)
+ internet(1) security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-rsa-pkalgs-02(54)}
+
+ ORAddress
+ FROM PKIX-X400Address-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-x400address-02(60)};
+
+ id-pkix OBJECT IDENTIFIER ::=
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7)}
+
+ -- PKIX arcs
+
+ id-pe OBJECT IDENTIFIER ::= { id-pkix 1 }
+ -- arc for private certificate extensions
+ id-qt OBJECT IDENTIFIER ::= { id-pkix 2 }
+ -- arc for policy qualifier types
+ id-kp OBJECT IDENTIFIER ::= { id-pkix 3 }
+ -- arc for extended key purpose OIDs
+ id-ad OBJECT IDENTIFIER ::= { id-pkix 48 }
+ -- arc for access descriptors
+
+ -- policyQualifierIds for Internet policy qualifiers
+
+ id-qt-cps OBJECT IDENTIFIER ::= { id-qt 1 }
+ -- OID for CPS qualifier
+ id-qt-unotice OBJECT IDENTIFIER ::= { id-qt 2 }
+ -- OID for user notice qualifier
+
+ -- access descriptor definitions
+
+ id-ad-ocsp OBJECT IDENTIFIER ::= { id-ad 1 }
+ id-ad-caIssuers OBJECT IDENTIFIER ::= { id-ad 2 }
+ id-ad-timeStamping OBJECT IDENTIFIER ::= { id-ad 3 }
+ id-ad-caRepository OBJECT IDENTIFIER ::= { id-ad 5 }
+
+ -- attribute data types
+ AttributeType ::= ATTRIBUTE.&id
+
+ -- Replaced by SingleAttribute{}
+ --
+ -- AttributeTypeAndValue ::= SEQUENCE {
+ -- type ATTRIBUTE.&id({SupportedAttributes}),
+ -- value ATTRIBUTE.&Type({SupportedAttributes}{@type}) }
+ --
+
+ -- Suggested naming attributes: Definition of the following
+ -- information object set may be augmented to meet local
+ -- requirements. Note that deleting members of the set may
+ -- prevent interoperability with conforming implementations.
+ -- All attributes are presented in pairs: the AttributeType
+ -- followed by the type definition for the corresponding
+ -- AttributeValue.
+
+ -- Arc for standard naming attributes
+
+ id-at OBJECT IDENTIFIER ::= { joint-iso-ccitt(2) ds(5) 4 }
+
+ -- Naming attributes of type X520name
+
+ id-at-name AttributeType ::= { id-at 41 }
+ at-name ATTRIBUTE ::= { TYPE X520name IDENTIFIED BY id-at-name }
+
+ id-at-surname AttributeType ::= { id-at 4 }
+ at-surname ATTRIBUTE ::= { TYPE X520name IDENTIFIED BY id-at-surname }
+
+ id-at-givenName AttributeType ::= { id-at 42 }
+ at-givenName ATTRIBUTE ::=
+ { TYPE X520name IDENTIFIED BY id-at-givenName }
+
+ id-at-initials AttributeType ::= { id-at 43 }
+ at-initials ATTRIBUTE ::=
+ { TYPE X520name IDENTIFIED BY id-at-initials }
+
+ id-at-generationQualifier AttributeType ::= { id-at 44 }
+ at-generationQualifier ATTRIBUTE ::=
+ { TYPE X520name IDENTIFIED BY id-at-generationQualifier }
+
+ -- Directory string type --
+
+ DirectoryString{INTEGER:maxSize} ::= CHOICE {
+ teletexString TeletexString(SIZE (1..maxSize)),
+ printableString PrintableString(SIZE (1..maxSize)),
+ bmpString BMPString(SIZE (1..maxSize)),
+ universalString UniversalString(SIZE (1..maxSize)),
+ uTF8String UTF8String(SIZE (1..maxSize))
+ }
+
+ X520name ::= DirectoryString {ub-name}
+
+ -- Naming attributes of type X520CommonName
+
+ id-at-commonName AttributeType ::= { id-at 3 }
+
+ at-x520CommonName ATTRIBUTE ::=
+ {TYPE X520CommonName IDENTIFIED BY id-at-commonName }
+
+ X520CommonName ::= DirectoryString {ub-common-name}
+
+ -- Naming attributes of type X520LocalityName
+
+ id-at-localityName AttributeType ::= { id-at 7 }
+
+ at-x520LocalityName ATTRIBUTE ::=
+ { TYPE X520LocalityName IDENTIFIED BY id-at-localityName }
+ X520LocalityName ::= DirectoryString {ub-locality-name}
+
+ -- Naming attributes of type X520StateOrProvinceName
+
+ id-at-stateOrProvinceName AttributeType ::= { id-at 8 }
+
+ at-x520StateOrProvinceName ATTRIBUTE ::=
+ { TYPE DirectoryString {ub-state-name}
+ IDENTIFIED BY id-at-stateOrProvinceName }
+ X520StateOrProvinceName ::= DirectoryString {ub-state-name}
+
+ -- Naming attributes of type X520OrganizationName
+
+ id-at-organizationName AttributeType ::= { id-at 10 }
+
+ at-x520OrganizationName ATTRIBUTE ::=
+ { TYPE DirectoryString {ub-organization-name}
+ IDENTIFIED BY id-at-organizationName }
+ X520OrganizationName ::= DirectoryString {ub-organization-name}
+
+ -- Naming attributes of type X520OrganizationalUnitName
+
+ id-at-organizationalUnitName AttributeType ::= { id-at 11 }
+
+ at-x520OrganizationalUnitName ATTRIBUTE ::=
+ { TYPE DirectoryString {ub-organizational-unit-name}
+ IDENTIFIED BY id-at-organizationalUnitName }
+ X520OrganizationalUnitName ::= DirectoryString
+ {ub-organizational-unit-name}
+
+ -- Naming attributes of type X520Title
+
+ id-at-title AttributeType ::= { id-at 12 }
+
+ at-x520Title ATTRIBUTE ::= { TYPE DirectoryString { ub-title }
+ IDENTIFIED BY id-at-title }
+
+ -- Naming attributes of type X520dnQualifier
+
+ id-at-dnQualifier AttributeType ::= { id-at 46 }
+
+ at-x520dnQualifier ATTRIBUTE ::= { TYPE PrintableString
+ IDENTIFIED BY id-at-dnQualifier }
+
+ -- Naming attributes of type X520countryName (digraph from IS 3166)
+
+ id-at-countryName AttributeType ::= { id-at 6 }
+
+ at-x520countryName ATTRIBUTE ::= { TYPE PrintableString (SIZE (2))
+ IDENTIFIED BY id-at-countryName }
+
+ -- Naming attributes of type X520SerialNumber
+
+ id-at-serialNumber AttributeType ::= { id-at 5 }
+
+ at-x520SerialNumber ATTRIBUTE ::= {TYPE PrintableString
+ (SIZE (1..ub-serial-number)) IDENTIFIED BY id-at-serialNumber }
+
+ -- Naming attributes of type X520Pseudonym
+
+ id-at-pseudonym AttributeType ::= { id-at 65 }
+
+ at-x520Pseudonym ATTRIBUTE ::= { TYPE DirectoryString {ub-pseudonym}
+ IDENTIFIED BY id-at-pseudonym }
+
+ -- Naming attributes of type DomainComponent (from RFC 2247)
+
+ id-domainComponent AttributeType ::=
+ { itu-t(0) data(9) pss(2342) ucl(19200300) pilot(100)
+ pilotAttributeType(1) 25 }
+
+ at-domainComponent ATTRIBUTE ::= {TYPE IA5String
+ IDENTIFIED BY id-domainComponent }
+
+ -- Legacy attributes
+
+ pkcs-9 OBJECT IDENTIFIER ::=
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) 9 }
+ id-emailAddress AttributeType ::= { pkcs-9 1 }
+
+ at-emailAddress ATTRIBUTE ::= {TYPE IA5String
+ (SIZE (1..ub-emailaddress-length)) IDENTIFIED BY
+ id-emailAddress }
+
+ -- naming data types --
+
+ Name ::= CHOICE { -- only one possibility for now --
+ rdnSequence RDNSequence }
+
+ RDNSequence ::= SEQUENCE OF RelativeDistinguishedName
+
+ DistinguishedName ::= RDNSequence
+
+ RelativeDistinguishedName ::=
+ SET SIZE (1 .. MAX) OF SingleAttribute { {SupportedAttributes} }
+
+ -- These are the known name elements for a DN
+
+ SupportedAttributes ATTRIBUTE ::= {
+ at-name | at-surname | at-givenName | at-initials |
+ at-generationQualifier | at-x520CommonName |
+ at-x520LocalityName | at-x520StateOrProvinceName |
+ at-x520OrganizationName | at-x520OrganizationalUnitName |
+ at-x520Title | at-x520dnQualifier | at-x520countryName |
+ at-x520SerialNumber | at-x520Pseudonym | at-domainComponent |
+ at-emailAddress, ... }
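+
+ -- Illustrative only (editorial, not part of the original module): a
+ -- Name holding a single countryName RDN could be written as
+ --
+ --   exampleName Name ::= rdnSequence : {
+ --       { { type id-at-countryName, value PrintableString : "SE" } }
+ --   }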
+
+ --
+ -- Certificate- and CRL-specific structures begin here
+ --
+
+ Certificate ::= SIGNED{TBSCertificate}
+
+ TBSCertificate ::= SEQUENCE {
+ version [0] Version DEFAULT v1,
+ serialNumber CertificateSerialNumber,
+ signature AlgorithmIdentifier{SIGNATURE-ALGORITHM,
+ {SignatureAlgorithms}},
+ issuer Name,
+ validity Validity,
+ subject Name,
+ subjectPublicKeyInfo SubjectPublicKeyInfo,
+ ... ,
+ [[2: -- If present, version MUST be v2
+ issuerUniqueID [1] IMPLICIT UniqueIdentifier OPTIONAL,
+ subjectUniqueID [2] IMPLICIT UniqueIdentifier OPTIONAL
+ ]],
+ [[3: -- If present, version MUST be v3 --
+ extensions [3] Extensions{{CertExtensions}} OPTIONAL
+ ]], ... }
+
+ Version ::= INTEGER { v1(0), v2(1), v3(2) }
+
+ CertificateSerialNumber ::= INTEGER
+
+ Validity ::= SEQUENCE {
+ notBefore Time,
+ notAfter Time }
+
+ Time ::= CHOICE {
+ utcTime UTCTime,
+ generalTime GeneralizedTime }
+
+ UniqueIdentifier ::= BIT STRING
+
+ SubjectPublicKeyInfo ::= SEQUENCE {
+ algorithm AlgorithmIdentifier{PUBLIC-KEY,
+ {PublicKeyAlgorithms}},
+ subjectPublicKey BIT STRING }
+
+ -- CRL structures
+
+ CertificateList ::= SIGNED{TBSCertList}
+
+ TBSCertList ::= SEQUENCE {
+ version Version OPTIONAL,
+ -- if present, MUST be v2
+ signature AlgorithmIdentifier{SIGNATURE-ALGORITHM,
+ {SignatureAlgorithms}},
+ issuer Name,
+ thisUpdate Time,
+ nextUpdate Time OPTIONAL,
+ revokedCertificates SEQUENCE SIZE (1..MAX) OF SEQUENCE {
+ userCertificate CertificateSerialNumber,
+ revocationDate Time,
+ ... ,
+ [[2: -- if present, version MUST be v2
+ crlEntryExtensions Extensions{{CrlEntryExtensions}}
+ OPTIONAL
+ ]], ...
+ } OPTIONAL,
+ ... ,
+ [[2: -- if present, version MUST be v2
+ crlExtensions [0] Extensions{{CrlExtensions}}
+ OPTIONAL
+ ]], ... }
+
+ -- Version, Time, CertificateSerialNumber, and Extensions were
+ -- defined earlier for use in the certificate structure
+
+ --
+ -- The two object sets below should be expanded to include
+ -- those algorithms which are supported by the system.
+ --
+ -- For example:
+ -- SignatureAlgorithms SIGNATURE-ALGORITHM ::= {
+ -- PKIXAlgs-2008.SignatureAlgs, ...,
+ -- - - RFC 3279 provides the base set
+ -- PKIX1-PSS-OAEP-ALGORITHMS.SignatureAlgs |
+ -- - - RFC 4055 provides extension algs
+ -- OtherModule.SignatureAlgs
+ -- - - RFC XXXX provides additional extension algs
+ -- }
+
+ SignatureAlgorithms SIGNATURE-ALGORITHM ::= {
+ PKIXAlgs-2009.SignatureAlgs, ...,
+ PKIX1-PSS-OAEP-Algorithms-2009.SignatureAlgs }
+
+ PublicKeyAlgorithms PUBLIC-KEY ::= {
+ PKIXAlgs-2009.PublicKeys, ...,
+ PKIX1-PSS-OAEP-Algorithms-2009.PublicKeys}
+
+ -- Upper Bounds
+
+ ub-state-name INTEGER ::= 128
+ ub-organization-name INTEGER ::= 64
+ ub-organizational-unit-name INTEGER ::= 64
+ ub-title INTEGER ::= 64
+ ub-serial-number INTEGER ::= 64
+ ub-pseudonym INTEGER ::= 128
+ ub-emailaddress-length INTEGER ::= 255
+ ub-locality-name INTEGER ::= 128
+ ub-common-name INTEGER ::= 64
+ ub-name INTEGER ::= 32768
+
+ -- Note - upper bounds on string types, such as TeletexString, are
+ -- measured in characters. Excepting PrintableString or IA5String, a
+ -- significantly greater number of octets will be required to hold
+ -- such a value. As a minimum, 16 octets or twice the specified
+ -- upper bound, whichever is the larger, should be allowed for
+ -- TeletexString. For UTF8String or UniversalString, at least four
+ -- times the upper bound should be allowed.
+
+ -- Information object classes used in the definition
+ -- of certificates and CRLs
+
+ -- Parameterized Type SIGNED
+ --
+ -- Three different versions of doing SIGNED:
+ -- 1. Simple and close to the previous version
+ --
+ -- SIGNED{ToBeSigned} ::= SEQUENCE {
+ -- toBeSigned ToBeSigned,
+ -- algorithm AlgorithmIdentifier{SIGNATURE-ALGORITHM,
+ -- {SignatureAlgorithms}},
+ -- signature BIT STRING
+ -- }
+
+ -- 2. From Authenticated Framework
+ --
+ -- SIGNED{ToBeSigned} ::= SEQUENCE {
+ -- toBeSigned ToBeSigned,
+ -- COMPONENTS OF SIGNATURE{ToBeSigned}
+ -- }
+ -- SIGNATURE{ToBeSigned} ::= SEQUENCE {
+ -- algorithmIdentifier AlgorithmIdentifier,
+ -- encrypted ENCRYPTED-HASH{ToBeSigned}
+ -- }
+ -- ENCRYPTED-HASH{ToBeSigned} ::=
+ -- BIT STRING
+ -- (CONSTRAINED BY {
+ -- shall be the result of applying a hashing procedure to
+ -- the DER-encoded (see 4.1) octets of a value of
+ -- ToBeSigned and then applying an encipherment procedure
+ -- to those octets
+ -- })
+ --
+ --
+ -- 3. A more complex version, but one that automatically ties
+ -- together both the signature algorithm and the
+ -- signature value for automatic decoding.
+ --
+ SIGNED{ToBeSigned} ::= SEQUENCE {
+ toBeSigned ToBeSigned,
+ algorithmIdentifier SEQUENCE {
+ algorithm SIGNATURE-ALGORITHM.
+ &id({SignatureAlgorithms}),
+ parameters SIGNATURE-ALGORITHM.
+ &Params({SignatureAlgorithms}
+ {@algorithmIdentifier.algorithm}) OPTIONAL
+ },
+ signature BIT STRING (CONTAINING SIGNATURE-ALGORITHM.&Value(
+ {SignatureAlgorithms}
+ {@algorithmIdentifier.algorithm}))
+ }
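+
+ -- Note (editorial, not part of the original module): Certificate and
+ -- CertificateList above are defined as SIGNED{TBSCertificate} and
+ -- SIGNED{TBSCertList}, so both automatically carry the matching
+ -- algorithm identifier and signature value.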
+
+ END
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1Implicit-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1Implicit-2009.asn1
new file mode 100644
index 0000000000..3651a5249b
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1Implicit-2009.asn1
@@ -0,0 +1,447 @@
+ PKIX1Implicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-implicit-02(59)}
+ DEFINITIONS IMPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ AttributeSet{}, EXTENSION, ATTRIBUTE
+ FROM PKIX-CommonTypes-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57) }
+
+ id-pe, id-kp, id-qt-unotice, id-qt-cps, ORAddress, Name,
+ RelativeDistinguishedName, CertificateSerialNumber,
+ DirectoryString{}, SupportedAttributes
+ FROM PKIX1Explicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-explicit-02(51) };
+
+ CertExtensions EXTENSION ::= {
+ ext-AuthorityKeyIdentifier | ext-SubjectKeyIdentifier |
+ ext-KeyUsage | ext-PrivateKeyUsagePeriod |
+ ext-CertificatePolicies | ext-PolicyMappings |
+ ext-SubjectAltName | ext-IssuerAltName |
+ ext-SubjectDirectoryAttributes |
+ ext-BasicConstraints | ext-NameConstraints |
+ ext-PolicyConstraints | ext-ExtKeyUsage |
+ ext-CRLDistributionPoints | ext-InhibitAnyPolicy |
+ ext-FreshestCRL | ext-AuthorityInfoAccess |
+ ext-SubjectInfoAccessSyntax, ... }
+
+ CrlExtensions EXTENSION ::= {
+ ext-AuthorityKeyIdentifier | ext-IssuerAltName |
+ ext-CRLNumber | ext-DeltaCRLIndicator |
+ ext-IssuingDistributionPoint | ext-FreshestCRL, ... }
+
+ CrlEntryExtensions EXTENSION ::= {
+ ext-CRLReason | ext-CertificateIssuer |
+ ext-HoldInstructionCode | ext-InvalidityDate, ... }
+ -- Shared arc for standard certificate and CRL extensions
+
+ id-ce OBJECT IDENTIFIER ::= { joint-iso-ccitt(2) ds(5) 29 }
+
+ -- authority key identifier OID and syntax
+
+ ext-AuthorityKeyIdentifier EXTENSION ::= { SYNTAX
+ AuthorityKeyIdentifier IDENTIFIED BY
+ id-ce-authorityKeyIdentifier }
+ id-ce-authorityKeyIdentifier OBJECT IDENTIFIER ::= { id-ce 35 }
+
+ AuthorityKeyIdentifier ::= SEQUENCE {
+ keyIdentifier [0] KeyIdentifier OPTIONAL,
+ authorityCertIssuer [1] GeneralNames OPTIONAL,
+ authorityCertSerialNumber [2] CertificateSerialNumber OPTIONAL }
+ (WITH COMPONENTS {
+ ...,
+ authorityCertIssuer PRESENT,
+ authorityCertSerialNumber PRESENT
+ } |
+ WITH COMPONENTS {
+ ...,
+ authorityCertIssuer ABSENT,
+ authorityCertSerialNumber ABSENT
+ })
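+
+ -- Note (editorial, not part of the original module): the constraint
+ -- above requires authorityCertIssuer and authorityCertSerialNumber
+ -- to be either both present or both absent.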
+
+ KeyIdentifier ::= OCTET STRING
+
+ -- subject key identifier OID and syntax
+
+ ext-SubjectKeyIdentifier EXTENSION ::= { SYNTAX
+ KeyIdentifier IDENTIFIED BY id-ce-subjectKeyIdentifier }
+ id-ce-subjectKeyIdentifier OBJECT IDENTIFIER ::= { id-ce 14 }
+
+ -- key usage extension OID and syntax
+
+ ext-KeyUsage EXTENSION ::= { SYNTAX
+ KeyUsage IDENTIFIED BY id-ce-keyUsage }
+ id-ce-keyUsage OBJECT IDENTIFIER ::= { id-ce 15 }
+
+ KeyUsage ::= BIT STRING {
+ digitalSignature (0),
+ nonRepudiation (1), -- recent editions of X.509 have
+ -- renamed this bit to
+ -- contentCommitment
+ keyEncipherment (2),
+ dataEncipherment (3),
+ keyAgreement (4),
+ keyCertSign (5),
+ cRLSign (6),
+ encipherOnly (7),
+ decipherOnly (8)
+ }
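+
+ -- Illustrative only (editorial, not part of the original module): a
+ -- typical CA certificate asserts
+ --
+ --   caKeyUsage KeyUsage ::= { keyCertSign, cRLSign }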
+
+ -- private key usage period extension OID and syntax
+
+ ext-PrivateKeyUsagePeriod EXTENSION ::= { SYNTAX
+ PrivateKeyUsagePeriod IDENTIFIED BY id-ce-privateKeyUsagePeriod }
+ id-ce-privateKeyUsagePeriod OBJECT IDENTIFIER ::= { id-ce 16 }
+
+ PrivateKeyUsagePeriod ::= SEQUENCE {
+ notBefore [0] GeneralizedTime OPTIONAL,
+ notAfter [1] GeneralizedTime OPTIONAL }
+ (WITH COMPONENTS {..., notBefore PRESENT } |
+ WITH COMPONENTS {..., notAfter PRESENT })
+
+ -- certificate policies extension OID and syntax
+
+ ext-CertificatePolicies EXTENSION ::= { SYNTAX
+ CertificatePolicies IDENTIFIED BY id-ce-certificatePolicies}
+ id-ce-certificatePolicies OBJECT IDENTIFIER ::= { id-ce 32 }
+
+ CertificatePolicies ::= SEQUENCE SIZE (1..MAX) OF PolicyInformation
+
+ PolicyInformation ::= SEQUENCE {
+ policyIdentifier CertPolicyId,
+ policyQualifiers SEQUENCE SIZE (1..MAX) OF
+ PolicyQualifierInfo OPTIONAL }
+
+ CertPolicyId ::= OBJECT IDENTIFIER
+
+ CERT-POLICY-QUALIFIER ::= TYPE-IDENTIFIER
+
+ PolicyQualifierInfo ::= SEQUENCE {
+ policyQualifierId CERT-POLICY-QUALIFIER.
+ &id({PolicyQualifierId}),
+ qualifier CERT-POLICY-QUALIFIER.
+ &Type({PolicyQualifierId}{@policyQualifierId})}
+
+ -- Implementations that recognize additional policy qualifiers MUST
+ -- augment the following definition for PolicyQualifierId
+
+ PolicyQualifierId CERT-POLICY-QUALIFIER ::=
+ { pqid-cps | pqid-unotice, ... }
+
+ pqid-cps CERT-POLICY-QUALIFIER ::= { CPSuri IDENTIFIED BY id-qt-cps }
+ pqid-unotice CERT-POLICY-QUALIFIER ::= { UserNotice
+ IDENTIFIED BY id-qt-unotice }
+
+ -- CPS pointer qualifier
+
+ CPSuri ::= IA5String
+
+ -- user notice qualifier
+
+ UserNotice ::= SEQUENCE {
+ noticeRef NoticeReference OPTIONAL,
+ explicitText DisplayText OPTIONAL}
+
+ --
+ -- This is not made explicit in the text
+ --
+ -- {WITH COMPONENTS {..., noticeRef PRESENT} |
+ -- WITH COMPONENTS {..., explicitText PRESENT }}
+
+ NoticeReference ::= SEQUENCE {
+ organization DisplayText,
+ noticeNumbers SEQUENCE OF INTEGER }
+
+ DisplayText ::= CHOICE {
+ ia5String IA5String (SIZE (1..200)),
+ visibleString VisibleString (SIZE (1..200)),
+ bmpString BMPString (SIZE (1..200)),
+ utf8String UTF8String (SIZE (1..200)) }
+
+ -- policy mapping extension OID and syntax
+
+ ext-PolicyMappings EXTENSION ::= { SYNTAX
+ PolicyMappings IDENTIFIED BY id-ce-policyMappings }
+ id-ce-policyMappings OBJECT IDENTIFIER ::= { id-ce 33 }
+
+ PolicyMappings ::= SEQUENCE SIZE (1..MAX) OF SEQUENCE {
+ issuerDomainPolicy CertPolicyId,
+ subjectDomainPolicy CertPolicyId
+ }
+
+ -- subject alternative name extension OID and syntax
+
+ ext-SubjectAltName EXTENSION ::= { SYNTAX
+ GeneralNames IDENTIFIED BY id-ce-subjectAltName }
+ id-ce-subjectAltName OBJECT IDENTIFIER ::= { id-ce 17 }
+
+ GeneralNames ::= SEQUENCE SIZE (1..MAX) OF GeneralName
+
+ GeneralName ::= CHOICE {
+ otherName [0] INSTANCE OF OTHER-NAME,
+ rfc822Name [1] IA5String,
+ dNSName [2] IA5String,
+ x400Address [3] ORAddress,
+ directoryName [4] Name,
+ ediPartyName [5] EDIPartyName,
+ uniformResourceIdentifier [6] IA5String,
+ iPAddress [7] OCTET STRING,
+ registeredID [8] OBJECT IDENTIFIER
+ }
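+
+ -- Editor's illustration (not part of RFC 5912): as a sketch of the
+ -- CHOICE value notation only, a DNS-based alternative name could be
+ -- written as the commented-out assignment below; the value name
+ -- exampleAltName and the host name are hypothetical.
+ --
+ -- exampleAltName GeneralName ::= dNSName : "www.example.com"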
+
+ -- AnotherName replaces OTHER-NAME ::= TYPE-IDENTIFIER, as
+ -- TYPE-IDENTIFIER is not supported in the '88 ASN.1 syntax
+
+ OTHER-NAME ::= TYPE-IDENTIFIER
+
+ EDIPartyName ::= SEQUENCE {
+ nameAssigner [0] DirectoryString {ubMax} OPTIONAL,
+ partyName [1] DirectoryString {ubMax}
+ }
+
+ -- issuer alternative name extension OID and syntax
+
+ ext-IssuerAltName EXTENSION ::= { SYNTAX
+ GeneralNames IDENTIFIED BY id-ce-issuerAltName }
+ id-ce-issuerAltName OBJECT IDENTIFIER ::= { id-ce 18 }
+
+ ext-SubjectDirectoryAttributes EXTENSION ::= { SYNTAX
+ SubjectDirectoryAttributes IDENTIFIED BY
+ id-ce-subjectDirectoryAttributes }
+ id-ce-subjectDirectoryAttributes OBJECT IDENTIFIER ::= { id-ce 9 }
+
+ SubjectDirectoryAttributes ::= SEQUENCE SIZE (1..MAX) OF
+ AttributeSet{{SupportedAttributes}}
+
+ -- basic constraints extension OID and syntax
+
+ ext-BasicConstraints EXTENSION ::= { SYNTAX
+ BasicConstraints IDENTIFIED BY id-ce-basicConstraints }
+ id-ce-basicConstraints OBJECT IDENTIFIER ::= { id-ce 19 }
+
+ BasicConstraints ::= SEQUENCE {
+ cA BOOLEAN DEFAULT FALSE,
+ pathLenConstraint INTEGER (0..MAX) OPTIONAL
+ }
+
+ -- name constraints extension OID and syntax
+ ext-NameConstraints EXTENSION ::= { SYNTAX
+ NameConstraints IDENTIFIED BY id-ce-nameConstraints }
+ id-ce-nameConstraints OBJECT IDENTIFIER ::= { id-ce 30 }
+
+ NameConstraints ::= SEQUENCE {
+ permittedSubtrees [0] GeneralSubtrees OPTIONAL,
+ excludedSubtrees [1] GeneralSubtrees OPTIONAL
+ }
+ --
+ -- This is a constraint in the issued certificates by CAs, but is
+ -- not a requirement on EEs.
+ --
+ -- (WITH COMPONENTS { ..., permittedSubtrees PRESENT} |
+ -- WITH COMPONENTS { ..., excludedSubtrees PRESENT }}
+
+ GeneralSubtrees ::= SEQUENCE SIZE (1..MAX) OF GeneralSubtree
+
+ GeneralSubtree ::= SEQUENCE {
+ base GeneralName,
+ minimum [0] BaseDistance DEFAULT 0,
+ maximum [1] BaseDistance OPTIONAL
+ }
+
+ BaseDistance ::= INTEGER (0..MAX)
+
+ -- policy constraints extension OID and syntax
+
+ ext-PolicyConstraints EXTENSION ::= { SYNTAX
+ PolicyConstraints IDENTIFIED BY id-ce-policyConstraints }
+ id-ce-policyConstraints OBJECT IDENTIFIER ::= { id-ce 36 }
+
+ PolicyConstraints ::= SEQUENCE {
+ requireExplicitPolicy [0] SkipCerts OPTIONAL,
+ inhibitPolicyMapping [1] SkipCerts OPTIONAL }
+ --
+ -- This is a constraint in the issued certificates by CAs,
+ -- but is not a requirement for EEs
+ --
+ -- (WITH COMPONENTS { ..., requireExplicitPolicy PRESENT} |
+ -- WITH COMPONENTS { ..., inhibitPolicyMapping PRESENT})
+
+ SkipCerts ::= INTEGER (0..MAX)
+
+ -- CRL distribution points extension OID and syntax
+
+ ext-CRLDistributionPoints EXTENSION ::= { SYNTAX
+ CRLDistributionPoints IDENTIFIED BY id-ce-cRLDistributionPoints}
+ id-ce-cRLDistributionPoints OBJECT IDENTIFIER ::= {id-ce 31}
+ CRLDistributionPoints ::= SEQUENCE SIZE (1..MAX) OF DistributionPoint
+
+ DistributionPoint ::= SEQUENCE {
+ distributionPoint [0] DistributionPointName OPTIONAL,
+ reasons [1] ReasonFlags OPTIONAL,
+ cRLIssuer [2] GeneralNames OPTIONAL
+ }
+ --
+ -- This is not a requirement in the text, but it seems as if it
+ -- should be
+ --
+ --(WITH COMPONENTS {..., distributionPoint PRESENT} |
+ -- WITH COMPONENTS {..., cRLIssuer PRESENT})
+
+ DistributionPointName ::= CHOICE {
+ fullName [0] GeneralNames,
+ nameRelativeToCRLIssuer [1] RelativeDistinguishedName
+ }
+
+ ReasonFlags ::= BIT STRING {
+ unused (0),
+ keyCompromise (1),
+ cACompromise (2),
+ affiliationChanged (3),
+ superseded (4),
+ cessationOfOperation (5),
+ certificateHold (6),
+ privilegeWithdrawn (7),
+ aACompromise (8)
+ }
+
+ -- extended key usage extension OID and syntax
+
+ ext-ExtKeyUsage EXTENSION ::= { SYNTAX
+ ExtKeyUsageSyntax IDENTIFIED BY id-ce-extKeyUsage }
+ id-ce-extKeyUsage OBJECT IDENTIFIER ::= {id-ce 37}
+
+ ExtKeyUsageSyntax ::= SEQUENCE SIZE (1..MAX) OF KeyPurposeId
+
+ KeyPurposeId ::= OBJECT IDENTIFIER
+
+ -- permit unspecified key uses
+
+ anyExtendedKeyUsage OBJECT IDENTIFIER ::= { id-ce-extKeyUsage 0 }
+
+ -- extended key purpose OIDs
+
+ id-kp-serverAuth OBJECT IDENTIFIER ::= { id-kp 1 }
+ id-kp-clientAuth OBJECT IDENTIFIER ::= { id-kp 2 }
+ id-kp-codeSigning OBJECT IDENTIFIER ::= { id-kp 3 }
+ id-kp-emailProtection OBJECT IDENTIFIER ::= { id-kp 4 }
+ id-kp-timeStamping OBJECT IDENTIFIER ::= { id-kp 8 }
+ id-kp-OCSPSigning OBJECT IDENTIFIER ::= { id-kp 9 }
+
+ -- inhibit any policy OID and syntax
+
+ ext-InhibitAnyPolicy EXTENSION ::= {SYNTAX
+ SkipCerts IDENTIFIED BY id-ce-inhibitAnyPolicy }
+ id-ce-inhibitAnyPolicy OBJECT IDENTIFIER ::= { id-ce 54 }
+
+ -- freshest (delta)CRL extension OID and syntax
+
+ ext-FreshestCRL EXTENSION ::= {SYNTAX
+ CRLDistributionPoints IDENTIFIED BY id-ce-freshestCRL }
+ id-ce-freshestCRL OBJECT IDENTIFIER ::= { id-ce 46 }
+
+ -- authority info access
+
+ ext-AuthorityInfoAccess EXTENSION ::= { SYNTAX
+ AuthorityInfoAccessSyntax IDENTIFIED BY
+ id-pe-authorityInfoAccess }
+ id-pe-authorityInfoAccess OBJECT IDENTIFIER ::= { id-pe 1 }
+
+ AuthorityInfoAccessSyntax ::=
+ SEQUENCE SIZE (1..MAX) OF AccessDescription
+
+ AccessDescription ::= SEQUENCE {
+ accessMethod OBJECT IDENTIFIER,
+ accessLocation GeneralName }
+
+ -- subject info access
+
+ ext-SubjectInfoAccessSyntax EXTENSION ::= { SYNTAX
+ SubjectInfoAccessSyntax IDENTIFIED BY id-pe-subjectInfoAccess }
+ id-pe-subjectInfoAccess OBJECT IDENTIFIER ::= { id-pe 11 }
+
+ SubjectInfoAccessSyntax ::=
+ SEQUENCE SIZE (1..MAX) OF AccessDescription
+
+ -- CRL number extension OID and syntax
+
+ ext-CRLNumber EXTENSION ::= {SYNTAX
+ INTEGER (0..MAX) IDENTIFIED BY id-ce-cRLNumber }
+ id-ce-cRLNumber OBJECT IDENTIFIER ::= { id-ce 20 }
+
+ CRLNumber ::= INTEGER (0..MAX)
+ -- issuing distribution point extension OID and syntax
+
+ ext-IssuingDistributionPoint EXTENSION ::= { SYNTAX
+ IssuingDistributionPoint IDENTIFIED BY
+ id-ce-issuingDistributionPoint }
+ id-ce-issuingDistributionPoint OBJECT IDENTIFIER ::= { id-ce 28 }
+
+ IssuingDistributionPoint ::= SEQUENCE {
+ distributionPoint [0] DistributionPointName OPTIONAL,
+ onlyContainsUserCerts [1] BOOLEAN DEFAULT FALSE,
+ onlyContainsCACerts [2] BOOLEAN DEFAULT FALSE,
+ onlySomeReasons [3] ReasonFlags OPTIONAL,
+ indirectCRL [4] BOOLEAN DEFAULT FALSE,
+ onlyContainsAttributeCerts [5] BOOLEAN DEFAULT FALSE
+ }
+ -- at most one of onlyContainsUserCerts, onlyContainsCACerts,
+ -- or onlyContainsAttributeCerts may be set to TRUE.
+
+ ext-DeltaCRLIndicator EXTENSION ::= { SYNTAX
+ CRLNumber IDENTIFIED BY id-ce-deltaCRLIndicator }
+ id-ce-deltaCRLIndicator OBJECT IDENTIFIER ::= { id-ce 27 }
+
+ -- CRL reasons extension OID and syntax
+
+ ext-CRLReason EXTENSION ::= { SYNTAX
+ CRLReason IDENTIFIED BY id-ce-cRLReasons }
+ id-ce-cRLReasons OBJECT IDENTIFIER ::= { id-ce 21 }
+
+ CRLReason ::= ENUMERATED {
+ unspecified (0),
+ keyCompromise (1),
+ cACompromise (2),
+ affiliationChanged (3),
+ superseded (4),
+ cessationOfOperation (5),
+ certificateHold (6),
+ removeFromCRL (8),
+ privilegeWithdrawn (9),
+ aACompromise (10)
+ }
+
+ -- certificate issuer CRL entry extension OID and syntax
+
+ ext-CertificateIssuer EXTENSION ::= { SYNTAX
+ GeneralNames IDENTIFIED BY id-ce-certificateIssuer }
+ id-ce-certificateIssuer OBJECT IDENTIFIER ::= { id-ce 29 }
+
+ -- hold instruction extension OID and syntax
+ ext-HoldInstructionCode EXTENSION ::= { SYNTAX
+ OBJECT IDENTIFIER IDENTIFIED BY id-ce-holdInstructionCode }
+ id-ce-holdInstructionCode OBJECT IDENTIFIER ::= { id-ce 23 }
+
+ -- ANSI x9 holdinstructions
+
+ holdInstruction OBJECT IDENTIFIER ::=
+ {joint-iso-itu-t(2) member-body(2) us(840) x9cm(10040) 2}
+ id-holdinstruction-none OBJECT IDENTIFIER ::=
+ {holdInstruction 1} -- deprecated
+ id-holdinstruction-callissuer OBJECT IDENTIFIER ::=
+ {holdInstruction 2}
+ id-holdinstruction-reject OBJECT IDENTIFIER ::=
+ {holdInstruction 3}
+
+ -- invalidity date CRL entry extension OID and syntax
+
+ ext-InvalidityDate EXTENSION ::= { SYNTAX
+ GeneralizedTime IDENTIFIED BY id-ce-invalidityDate }
+ id-ce-invalidityDate OBJECT IDENTIFIER ::= { id-ce 24 }
+ -- Upper bounds
+ ubMax INTEGER ::= 32768
+
+ END
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXAlgs-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXAlgs-2009.asn1
new file mode 100644
index 0000000000..d58bcb5b19
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXAlgs-2009.asn1
@@ -0,0 +1,528 @@
+ PKIXAlgs-2009 { iso(1) identified-organization(3) dod(6)
+ internet(1) security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-algorithms2008-02(56) }
+
+ DEFINITIONS EXPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ PUBLIC-KEY, SIGNATURE-ALGORITHM, DIGEST-ALGORITHM, SMIME-CAPS
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ mda-sha224, mda-sha256, mda-sha384, mda-sha512
+ FROM PKIX1-PSS-OAEP-Algorithms-2009
+ {iso(1) identified-organization(3) dod(6) internet(1)
+ security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-rsa-pkalgs-02(54)} ;
+
+ --
+ -- Public Key (pk-) Algorithms
+ --
+
+ PublicKeys PUBLIC-KEY ::= {
+ pk-rsa |
+ pk-dsa |
+ pk-dh |
+ pk-kea,
+ ...,
+ pk-ec |
+ pk-ecDH |
+ pk-ecMQV
+ }
+
+ --
+ -- Signature Algorithms (sa-)
+ --
+
+ SignatureAlgs SIGNATURE-ALGORITHM ::= {
+ sa-rsaWithMD2 |
+ sa-rsaWithMD5 |
+ sa-rsaWithSHA1 |
+ sa-dsaWithSHA1 |
+ sa-ecdsaWithSHA1,
+ ..., -- Extensible
+ sa-dsaWithSHA224 |
+ sa-dsaWithSHA256 |
+ sa-ecdsaWithSHA224 |
+ sa-ecdsaWithSHA256 |
+ sa-ecdsaWithSHA384 |
+ sa-ecdsaWithSHA512
+ }
+
+ --
+ -- S/MIME CAPS for algorithms in this document
+ --
+ -- For all of the algorithms laid out in this document, the
+ -- parameters field for the S/MIME capabilities is defined as
+ -- ABSENT as there are no specific values that need to be known
+ -- by the receiver for negotiation.
+
+ --
+
+ SMimeCaps SMIME-CAPS ::= {
+ sa-rsaWithMD2.&smimeCaps |
+ sa-rsaWithMD5.&smimeCaps |
+ sa-rsaWithSHA1.&smimeCaps |
+ sa-dsaWithSHA1.&smimeCaps |
+ sa-dsaWithSHA224.&smimeCaps |
+ sa-dsaWithSHA256.&smimeCaps |
+ sa-ecdsaWithSHA1.&smimeCaps |
+ sa-ecdsaWithSHA224.&smimeCaps |
+ sa-ecdsaWithSHA256.&smimeCaps |
+ sa-ecdsaWithSHA384.&smimeCaps |
+ sa-ecdsaWithSHA512.&smimeCaps,
+ ... }
+
+ -- RSA PK Algorithm, Parameters, and Keys
+
+ pk-rsa PUBLIC-KEY ::= {
+ IDENTIFIER rsaEncryption
+ KEY RSAPublicKey
+ PARAMS TYPE NULL ARE absent
+ -- Private key format not in this module --
+ CERT-KEY-USAGE {digitalSignature, nonRepudiation,
+ keyEncipherment, dataEncipherment, keyCertSign, cRLSign}
+ }
+
+ rsaEncryption OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1)
+ pkcs-1(1) 1 }
+
+ RSAPublicKey ::= SEQUENCE {
+ modulus INTEGER, -- n
+ publicExponent INTEGER -- e
+ }
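+
+ -- Editor's illustration (not part of RFC 5912): a commented-out sketch
+ -- of the SEQUENCE value notation for this type; the toy modulus 3233
+ -- (61 * 53) with exponent 17 is obviously not a usable key, and the
+ -- value name demoRsaKey is hypothetical.
+ --
+ -- demoRsaKey RSAPublicKey ::= { modulus 3233, publicExponent 17 }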
+
+ -- DSA PK Algorithm, Parameters, and Keys
+
+ pk-dsa PUBLIC-KEY ::= {
+ IDENTIFIER id-dsa
+ KEY DSAPublicKey
+ PARAMS TYPE DSA-Params ARE inheritable
+ -- Private key format not in this module --
+ CERT-KEY-USAGE { digitalSignature, nonRepudiation, keyCertSign,
+ cRLSign }
+ }
+
+ id-dsa OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) x9-57(10040) x9algorithm(4) 1 }
+
+ DSA-Params ::= SEQUENCE {
+ p INTEGER,
+ q INTEGER,
+ g INTEGER
+ }
+
+ DSAPublicKey ::= INTEGER -- public key, y
+
+ -- Diffie-Hellman PK Algorithm, Parameters, and Keys
+
+ pk-dh PUBLIC-KEY ::= {
+ IDENTIFIER dhpublicnumber
+ KEY DHPublicKey
+ PARAMS TYPE DomainParameters ARE inheritable
+ -- Private key format not in this module --
+ CERT-KEY-USAGE {keyAgreement, encipherOnly, decipherOnly }
+ }
+
+ dhpublicnumber OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) ansi-x942(10046)
+ number-type(2) 1 }
+
+ DomainParameters ::= SEQUENCE {
+ p INTEGER, -- odd prime, p=jq +1
+ g INTEGER, -- generator, g
+ q INTEGER, -- factor of p-1
+ j INTEGER OPTIONAL, -- subgroup factor, j>= 2
+ validationParams ValidationParams OPTIONAL
+ }
+
+ ValidationParams ::= SEQUENCE {
+ seed BIT STRING,
+ pgenCounter INTEGER
+ }
+
+ DiffieHellmanPublicNumber ::= INTEGER -- according to http://wikisec.free.fr/crypto/crypto.html
+
+ DHPublicKey ::= INTEGER -- public key, y = g^x mod p
+
+ -- KEA PK Algorithm and Parameters
+
+ pk-kea PUBLIC-KEY ::= {
+ IDENTIFIER id-keyExchangeAlgorithm
+ -- key is not encoded --
+ PARAMS TYPE KEA-Params-Id ARE required
+ -- Private key format not in this module --
+ CERT-KEY-USAGE {keyAgreement, encipherOnly, decipherOnly }
+ }
+ id-keyExchangeAlgorithm OBJECT IDENTIFIER ::= {
+ joint-iso-itu-t(2) country(16) us(840) organization(1)
+ gov(101) dod(2) infosec(1) algorithms(1) 22 }
+
+ KEA-Params-Id ::= OCTET STRING
+
+ -- Elliptic Curve (EC) Signatures: Unrestricted Algorithms
+ -- (Section 2.1.1 of RFC 5480)
+ --
+ -- EC Unrestricted Algorithm ID -- -- this is used for ECDSA
+
+ pk-ec PUBLIC-KEY ::= {
+ IDENTIFIER id-ecPublicKey
+ KEY ECPoint
+ PARAMS TYPE ECParameters ARE required
+ -- Private key format not in this module --
+ CERT-KEY-USAGE { digitalSignature, nonRepudiation, keyAgreement,
+ keyCertSign, cRLSign }
+ }
+
+ ECPoint ::= OCTET STRING -- see RFC 5480 for syntax and restrictions
+
+ id-ecPublicKey OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) ansi-X9-62(10045) keyType(2) 1 }
+
+ -- Elliptic Curve (EC) Signatures: Restricted Algorithms
+ -- (Section 2.1.2 of RFC 5480)
+ --
+ -- EC Diffie-Hellman Algorithm ID
+
+ pk-ecDH PUBLIC-KEY ::= {
+ IDENTIFIER id-ecDH
+ KEY ECPoint
+ PARAMS TYPE ECParameters ARE required
+ -- Private key format not in this module --
+ CERT-KEY-USAGE { keyAgreement, encipherOnly, decipherOnly }
+ }
+
+ id-ecDH OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) schemes(1)
+ ecdh(12) }
+
+ -- EC Menezes-Qu-Vanstone Algorithm ID
+
+ pk-ecMQV PUBLIC-KEY ::= {
+ IDENTIFIER id-ecMQV
+ KEY ECPoint
+ PARAMS TYPE ECParameters ARE required
+ -- Private key format not in this module --
+ CERT-KEY-USAGE { keyAgreement, encipherOnly, decipherOnly }
+ }
+
+ id-ecMQV OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) schemes(1)
+ ecmqv(13) }
+
+ -- Parameters and Keys for both Restricted and Unrestricted EC
+
+ ECParameters ::= CHOICE {
+ namedCurve CURVE.&id({NamedCurve})
+ -- implicitCurve NULL
+ -- implicitCurve MUST NOT be used in PKIX
+ -- specifiedCurve SpecifiedCurve
+ -- specifiedCurve MUST NOT be used in PKIX
+ -- Details for specifiedCurve can be found in [X9.62]
+ -- Any future additions to this CHOICE should be coordinated
+ -- with ANSI X.9.
+ }
+ -- If you need to be able to decode ANSI X.9 parameter structures,
+ -- uncomment the implicitCurve and specifiedCurve above, and also
+ -- uncomment the following:
+ --(WITH COMPONENTS {namedCurve PRESENT})
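+
+ -- Editor's illustration (not part of RFC 5912): with only the
+ -- namedCurve alternative available, an ECParameters value reduces to
+ -- a curve OID, e.g. the commented-out assignment below; the value
+ -- name p256Params is hypothetical.
+ --
+ -- p256Params ECParameters ::= namedCurve : secp256r1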
+
+ -- Sec 2.1.1.1 Named Curve
+
+ CURVE ::= CLASS { &id OBJECT IDENTIFIER UNIQUE }
+ WITH SYNTAX { ID &id }
+
+ NamedCurve CURVE ::= {
+ { ID secp192r1 } | { ID sect163k1 } | { ID sect163r2 } |
+ { ID secp224r1 } | { ID sect233k1 } | { ID sect233r1 } |
+ { ID secp256r1 } | { ID sect283k1 } | { ID sect283r1 } |
+ { ID secp384r1 } | { ID sect409k1 } | { ID sect409r1 } |
+ { ID secp521r1 } | { ID sect571k1 } | { ID sect571r1 },
+ ... -- Extensible
+ }
+
+ -- Note in [X9.62] the curves are referred to as 'ansiX9' as
+ -- opposed to 'sec'. For example, secp192r1 is the same curve as
+ -- ansix9p192r1.
+
+ -- Note that in [PKI-ALG] the secp192r1 curve was referred to as
+ -- prime192v1 and the secp256r1 curve was referred to as
+ -- prime256v1.
+
+ -- Note that [FIPS186-3] refers to secp192r1 as P-192,
+ -- secp224r1 as P-224, secp256r1 as P-256, secp384r1 as P-384,
+ -- and secp521r1 as P-521.
+
+ secp192r1 OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) ansi-X9-62(10045) curves(3)
+ prime(1) 1 }
+
+ sect163k1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 1 }
+
+ sect163r2 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 15 }
+
+ secp224r1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 33 }
+
+ sect233k1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 26 }
+
+ sect233r1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 27 }
+
+ secp256r1 OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) ansi-X9-62(10045) curves(3)
+ prime(1) 7 }
+
+ sect283k1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 16 }
+
+ sect283r1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 17 }
+
+ secp384r1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 34 }
+
+ sect409k1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 36 }
+
+ sect409r1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 37 }
+
+ secp521r1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 35 }
+
+ sect571k1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 38 }
+
+ sect571r1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 39 }
+
+ -- RSA with MD-2
+
+ sa-rsaWithMD2 SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER md2WithRSAEncryption
+ PARAMS TYPE NULL ARE required
+ HASHES { mda-md2 }
+ PUBLIC-KEYS { pk-rsa }
+ SMIME-CAPS { IDENTIFIED BY md2WithRSAEncryption }
+ }
+
+ md2WithRSAEncryption OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1)
+ pkcs-1(1) 2 }
+
+ -- RSA with MD-5
+
+ sa-rsaWithMD5 SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER md5WithRSAEncryption
+ PARAMS TYPE NULL ARE required
+ HASHES { mda-md5 }
+ PUBLIC-KEYS { pk-rsa }
+ SMIME-CAPS { IDENTIFIED BY md5WithRSAEncryption }
+ }
+
+ md5WithRSAEncryption OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1)
+ pkcs-1(1) 4 }
+
+ -- RSA with SHA-1
+
+ sa-rsaWithSHA1 SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER sha1WithRSAEncryption
+ PARAMS TYPE NULL ARE required
+ HASHES { mda-sha1 }
+ PUBLIC-KEYS { pk-rsa }
+ SMIME-CAPS {IDENTIFIED BY sha1WithRSAEncryption }
+ }
+
+ sha1WithRSAEncryption OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1)
+ pkcs-1(1) 5 }
+
+ -- DSA with SHA-1
+
+ sa-dsaWithSHA1 SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER dsa-with-sha1
+ VALUE DSA-Sig-Value
+ PARAMS TYPE NULL ARE absent
+ HASHES { mda-sha1 }
+ PUBLIC-KEYS { pk-dsa }
+ SMIME-CAPS { IDENTIFIED BY dsa-with-sha1 }
+ }
+
+ dsa-with-sha1 OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) x9-57(10040) x9algorithm(4) 3 }
+
+ -- DSA with SHA-224
+
+ sa-dsaWithSHA224 SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER dsa-with-sha224
+ VALUE DSA-Sig-Value
+ PARAMS TYPE NULL ARE absent
+ HASHES { mda-sha224 }
+ PUBLIC-KEYS { pk-dsa }
+ SMIME-CAPS { IDENTIFIED BY dsa-with-sha224 }
+ }
+
+ dsa-with-sha224 OBJECT IDENTIFIER ::= {
+ joint-iso-ccitt(2) country(16) us(840) organization(1) gov(101)
+ csor(3) algorithms(4) id-dsa-with-sha2(3) 1 }
+
+ -- DSA with SHA-256
+
+ sa-dsaWithSHA256 SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER dsa-with-sha256
+ VALUE DSA-Sig-Value
+ PARAMS TYPE NULL ARE absent
+ HASHES { mda-sha256 }
+ PUBLIC-KEYS { pk-dsa }
+ SMIME-CAPS { IDENTIFIED BY dsa-with-sha256 }
+ }
+
+ dsa-with-sha256 OBJECT IDENTIFIER ::= {
+ joint-iso-ccitt(2) country(16) us(840) organization(1) gov(101)
+ csor(3) algorithms(4) id-dsa-with-sha2(3) 2 }
+
+ -- ECDSA with SHA-1
+
+ sa-ecdsaWithSHA1 SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER ecdsa-with-SHA1
+ VALUE ECDSA-Sig-Value
+ PARAMS TYPE NULL ARE absent
+ HASHES { mda-sha1 }
+ PUBLIC-KEYS { pk-ec }
+ SMIME-CAPS {IDENTIFIED BY ecdsa-with-SHA1 }
+ }
+
+ ecdsa-with-SHA1 OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) ansi-X9-62(10045)
+ signatures(4) 1 }
+
+ -- ECDSA with SHA-224
+
+ sa-ecdsaWithSHA224 SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER ecdsa-with-SHA224
+ VALUE ECDSA-Sig-Value
+ PARAMS TYPE NULL ARE absent
+ HASHES { mda-sha224 }
+ PUBLIC-KEYS { pk-ec }
+ SMIME-CAPS { IDENTIFIED BY ecdsa-with-SHA224 }
+ }
+
+ ecdsa-with-SHA224 OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) ansi-X9-62(10045) signatures(4)
+ ecdsa-with-SHA2(3) 1 }
+
+ -- ECDSA with SHA-256
+
+ sa-ecdsaWithSHA256 SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER ecdsa-with-SHA256
+ VALUE ECDSA-Sig-Value
+ PARAMS TYPE NULL ARE absent
+ HASHES { mda-sha256 }
+ PUBLIC-KEYS { pk-ec }
+ SMIME-CAPS { IDENTIFIED BY ecdsa-with-SHA256 }
+ }
+
+ ecdsa-with-SHA256 OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) ansi-X9-62(10045) signatures(4)
+ ecdsa-with-SHA2(3) 2 }
+
+ -- ECDSA with SHA-384
+
+ sa-ecdsaWithSHA384 SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER ecdsa-with-SHA384
+ VALUE ECDSA-Sig-Value
+ PARAMS TYPE NULL ARE absent
+ HASHES { mda-sha384 }
+ PUBLIC-KEYS { pk-ec }
+ SMIME-CAPS { IDENTIFIED BY ecdsa-with-SHA384 }
+ }
+ ecdsa-with-SHA384 OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) ansi-X9-62(10045) signatures(4)
+ ecdsa-with-SHA2(3) 3 }
+
+ -- ECDSA with SHA-512
+
+ sa-ecdsaWithSHA512 SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER ecdsa-with-SHA512
+ VALUE ECDSA-Sig-Value
+ PARAMS TYPE NULL ARE absent
+ HASHES { mda-sha512 }
+ PUBLIC-KEYS { pk-ec }
+ SMIME-CAPS { IDENTIFIED BY ecdsa-with-SHA512 }
+ }
+
+ ecdsa-with-SHA512 OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) ansi-X9-62(10045) signatures(4)
+ ecdsa-with-SHA2(3) 4 }
+
+ --
+ -- Signature Values
+ --
+
+ -- DSA
+
+ DSA-Sig-Value ::= SEQUENCE {
+ r INTEGER,
+ s INTEGER
+ }
+
+ -- ECDSA
+
+ ECDSA-Sig-Value ::= SEQUENCE {
+ r INTEGER,
+ s INTEGER
+ }
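+
+ -- Editor's illustration (not part of RFC 5912): DSA-Sig-Value and
+ -- ECDSA-Sig-Value share the same two-component shape, so a decoded
+ -- signature is written the same way for both, e.g. the commented-out
+ -- assignment below; the value name demoSig and the integers are
+ -- hypothetical.
+ --
+ -- demoSig ECDSA-Sig-Value ::= { r 1234567890, s 987654321 }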
+
+ --
+ -- Message Digest Algorithms (mda-)
+ --
+
+ HashAlgs DIGEST-ALGORITHM ::= {
+ mda-md2 |
+ mda-md5 |
+ mda-sha1,
+ ... -- Extensible
+ }
+ -- MD-2
+
+ mda-md2 DIGEST-ALGORITHM ::= {
+ IDENTIFIER id-md2
+ PARAMS TYPE NULL ARE preferredAbsent
+ }
+
+ id-md2 OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) rsadsi(113549)
+ digestAlgorithm(2) 2 }
+
+ -- MD-5
+
+ mda-md5 DIGEST-ALGORITHM ::= {
+ IDENTIFIER id-md5
+ PARAMS TYPE NULL ARE preferredAbsent
+ }
+
+ id-md5 OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) rsadsi(113549)
+ digestAlgorithm(2) 5 }
+
+ -- SHA-1
+
+ mda-sha1 DIGEST-ALGORITHM ::= {
+ IDENTIFIER id-sha1
+ PARAMS TYPE NULL ARE preferredAbsent
+ }
+
+ id-sha1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) oiw(14) secsig(3)
+ algorithm(2) 26 }
+
+ END
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXAttributeCertificate-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXAttributeCertificate-2009.asn1
new file mode 100644
index 0000000000..3ab074643f
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXAttributeCertificate-2009.asn1
@@ -0,0 +1,292 @@
+ PKIXAttributeCertificate-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-attribute-cert-02(47)}
+ DEFINITIONS IMPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ AttributeSet{}, Extensions{}, SecurityCategory{},
+ EXTENSION, ATTRIBUTE, SECURITY-CATEGORY
+ FROM PKIX-CommonTypes-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57) }
+
+ AlgorithmIdentifier{}, SIGNATURE-ALGORITHM, DIGEST-ALGORITHM
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ -- IMPORTed module OIDs MAY change if [PKIXPROF] changes
+ -- PKIX Certificate Extensions
+
+ CertificateSerialNumber, UniqueIdentifier, id-pkix, id-pe, id-kp,
+ id-ad, id-at, SIGNED{}, SignatureAlgorithms
+ FROM PKIX1Explicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-explicit-02(51)}
+
+ GeneralName, GeneralNames, id-ce, ext-AuthorityKeyIdentifier,
+ ext-AuthorityInfoAccess, ext-CRLDistributionPoints
+ FROM PKIX1Implicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-implicit-02(59)}
+
+ ContentInfo
+ FROM CryptographicMessageSyntax-2009
+ { iso(1) member-body(2) us(840) rsadsi(113549)
+ pkcs(1) pkcs-9(9) smime(16) modules(0) id-mod-cms-2004-02(41) };
+ -- Define the set of extensions that can appear.
+ -- Some of these are imported from PKIX Cert
+
+ AttributeCertExtensions EXTENSION ::= {
+ ext-auditIdentity | ext-targetInformation |
+ ext-AuthorityKeyIdentifier | ext-AuthorityInfoAccess |
+ ext-CRLDistributionPoints | ext-noRevAvail | ext-ac-proxying |
+ ext-aaControls, ... }
+
+ ext-auditIdentity EXTENSION ::= { SYNTAX
+ OCTET STRING IDENTIFIED BY id-pe-ac-auditIdentity}
+
+ ext-targetInformation EXTENSION ::= { SYNTAX
+ Targets IDENTIFIED BY id-ce-targetInformation }
+
+ ext-noRevAvail EXTENSION ::= { SYNTAX
+ NULL IDENTIFIED BY id-ce-noRevAvail}
+
+ ext-ac-proxying EXTENSION ::= { SYNTAX
+ ProxyInfo IDENTIFIED BY id-pe-ac-proxying}
+
+ ext-aaControls EXTENSION ::= { SYNTAX
+ AAControls IDENTIFIED BY id-pe-aaControls}
+
+ -- Define the set of attributes used here
+
+ AttributesDefined ATTRIBUTE ::= { at-authenticationInfo |
+ at-accesIdentity | at-chargingIdentity | at-group |
+ at-role | at-clearance | at-encAttrs, ...}
+
+ at-authenticationInfo ATTRIBUTE ::= { TYPE SvceAuthInfo
+ IDENTIFIED BY id-aca-authenticationInfo}
+
+ at-accesIdentity ATTRIBUTE ::= { TYPE SvceAuthInfo
+ IDENTIFIED BY id-aca-accessIdentity}
+
+ at-chargingIdentity ATTRIBUTE ::= { TYPE IetfAttrSyntax
+ IDENTIFIED BY id-aca-chargingIdentity}
+
+ at-group ATTRIBUTE ::= { TYPE IetfAttrSyntax
+ IDENTIFIED BY id-aca-group}
+
+ at-role ATTRIBUTE ::= { TYPE RoleSyntax
+ IDENTIFIED BY id-at-role}
+
+ at-clearance ATTRIBUTE ::= { TYPE Clearance
+ IDENTIFIED BY id-at-clearance}
+ at-clearance-RFC3281 ATTRIBUTE ::= {TYPE Clearance-rfc3281
+ IDENTIFIED BY id-at-clearance-rfc3281 }
+
+ at-encAttrs ATTRIBUTE ::= { TYPE ContentInfo
+ IDENTIFIED BY id-aca-encAttrs}
+
+ --
+ -- OIDs used by Attribute Certificate Extensions
+ --
+
+ id-pe-ac-auditIdentity OBJECT IDENTIFIER ::= { id-pe 4 }
+ id-pe-aaControls OBJECT IDENTIFIER ::= { id-pe 6 }
+ id-pe-ac-proxying OBJECT IDENTIFIER ::= { id-pe 10 }
+ id-ce-targetInformation OBJECT IDENTIFIER ::= { id-ce 55 }
+ id-ce-noRevAvail OBJECT IDENTIFIER ::= { id-ce 56 }
+
+ --
+ -- OIDs used by Attribute Certificate Attributes
+ --
+
+ id-aca OBJECT IDENTIFIER ::= { id-pkix 10 }
+
+ id-aca-authenticationInfo OBJECT IDENTIFIER ::= { id-aca 1 }
+ id-aca-accessIdentity OBJECT IDENTIFIER ::= { id-aca 2 }
+ id-aca-chargingIdentity OBJECT IDENTIFIER ::= { id-aca 3 }
+ id-aca-group OBJECT IDENTIFIER ::= { id-aca 4 }
+ -- { id-aca 5 } is reserved
+ id-aca-encAttrs OBJECT IDENTIFIER ::= { id-aca 6 }
+
+ id-at-role OBJECT IDENTIFIER ::= { id-at 72}
+ id-at-clearance OBJECT IDENTIFIER ::= {
+ joint-iso-ccitt(2) ds(5) attributeType(4) clearance (55) }
+
+ -- Uncomment the following declaration and comment the above line if
+ -- using the id-at-clearance attribute as defined in [RFC3281]
+ -- id-at-clearance ::= id-at-clearance-rfc3281
+
+ id-at-clearance-rfc3281 OBJECT IDENTIFIER ::= {
+ joint-iso-ccitt(2) ds(5) module(1) selected-attribute-types(5)
+ clearance (55) }
+
+ --
+ -- The syntax of an Attribute Certificate
+ --
+
+ AttributeCertificate ::= SIGNED{AttributeCertificateInfo}
+
+ AttributeCertificateInfo ::= SEQUENCE {
+ version AttCertVersion, -- version is v2
+ holder Holder,
+ issuer AttCertIssuer,
+ signature AlgorithmIdentifier{SIGNATURE-ALGORITHM,
+ {SignatureAlgorithms}},
+ serialNumber CertificateSerialNumber,
+ attrCertValidityPeriod AttCertValidityPeriod,
+ attributes SEQUENCE OF
+ AttributeSet{{AttributesDefined}},
+ issuerUniqueID UniqueIdentifier OPTIONAL,
+ extensions Extensions{{AttributeCertExtensions}} OPTIONAL
+ }
+
+ AttCertVersion ::= INTEGER { v2(1) }
+
+ Holder ::= SEQUENCE {
+ baseCertificateID [0] IssuerSerial OPTIONAL,
+ -- the issuer and serial number of
+ -- the holder's Public Key Certificate
+ entityName [1] GeneralNames OPTIONAL,
+ -- the name of the claimant or role
+ objectDigestInfo [2] ObjectDigestInfo OPTIONAL
+ -- used to directly authenticate the
+ -- holder, for example, an executable
+ }
+
+ ObjectDigestInfo ::= SEQUENCE {
+ digestedObjectType ENUMERATED {
+ publicKey (0),
+ publicKeyCert (1),
+ otherObjectTypes (2) },
+ -- otherObjectTypes MUST NOT
+ -- be used in this profile
+ otherObjectTypeID OBJECT IDENTIFIER OPTIONAL,
+ digestAlgorithm AlgorithmIdentifier{DIGEST-ALGORITHM, {...}},
+ objectDigest BIT STRING
+ }
+
+ AttCertIssuer ::= CHOICE {
+ v1Form GeneralNames, -- MUST NOT be used in this
+ -- profile
+ v2Form [0] V2Form -- v2 only
+ }
+
+ V2Form ::= SEQUENCE {
+ issuerName GeneralNames OPTIONAL,
+ baseCertificateID [0] IssuerSerial OPTIONAL,
+ objectDigestInfo [1] ObjectDigestInfo OPTIONAL
+ -- issuerName MUST be present in this profile
+ -- baseCertificateID and objectDigestInfo MUST
+ -- NOT be present in this profile
+ }
+
+ IssuerSerial ::= SEQUENCE {
+ issuer GeneralNames,
+ serial CertificateSerialNumber,
+ issuerUID UniqueIdentifier OPTIONAL
+ }
+
+ AttCertValidityPeriod ::= SEQUENCE {
+ notBeforeTime GeneralizedTime,
+ notAfterTime GeneralizedTime
+ }
+
+ --
+ -- Syntax used by Attribute Certificate Extensions
+ --
+
+ Targets ::= SEQUENCE OF Target
+
+ Target ::= CHOICE {
+ targetName [0] GeneralName,
+ targetGroup [1] GeneralName,
+ targetCert [2] TargetCert
+ }
+
+ TargetCert ::= SEQUENCE {
+ targetCertificate IssuerSerial,
+ targetName GeneralName OPTIONAL,
+ certDigestInfo ObjectDigestInfo OPTIONAL
+ }
+
+ AAControls ::= SEQUENCE {
+ pathLenConstraint INTEGER (0..MAX) OPTIONAL,
+ permittedAttrs [0] AttrSpec OPTIONAL,
+ excludedAttrs [1] AttrSpec OPTIONAL,
+ permitUnSpecified BOOLEAN DEFAULT TRUE
+ }
+
+ AttrSpec::= SEQUENCE OF OBJECT IDENTIFIER
+
+ ProxyInfo ::= SEQUENCE OF Targets
+
+ --
+ -- Syntax used by Attribute Certificate Attributes
+ --
+ IetfAttrSyntax ::= SEQUENCE {
+ policyAuthority[0] GeneralNames OPTIONAL,
+ values SEQUENCE OF CHOICE {
+ octets OCTET STRING,
+ oid OBJECT IDENTIFIER,
+ string UTF8String
+ }
+ }
+
+ SvceAuthInfo ::= SEQUENCE {
+ service GeneralName,
+ ident GeneralName,
+ authInfo OCTET STRING OPTIONAL
+ }
+
+ RoleSyntax ::= SEQUENCE {
+ roleAuthority [0] GeneralNames OPTIONAL,
+ roleName [1] GeneralName
+ }
+
+ Clearance ::= SEQUENCE {
+ policyId OBJECT IDENTIFIER,
+ classList ClassList DEFAULT {unclassified},
+ securityCategories SET OF SecurityCategory
+ {{SupportedSecurityCategories}} OPTIONAL
+ }
+
+ -- Uncomment the following lines to support deprecated clearance
+ -- syntax and comment out previous Clearance.
+
+ -- Clearance ::= Clearance-rfc3281
+
+ Clearance-rfc3281 ::= SEQUENCE {
+ policyId [0] OBJECT IDENTIFIER,
+ classList [1] ClassList DEFAULT {unclassified},
+ securityCategories [2] SET OF SecurityCategory-rfc3281
+ {{SupportedSecurityCategories}} OPTIONAL
+ }
+
+ ClassList ::= BIT STRING {
+ unmarked (0),
+ unclassified (1),
+ restricted (2),
+ confidential (3),
+ secret (4),
+ topSecret (5)
+ }
+ SupportedSecurityCategories SECURITY-CATEGORY ::= { ... }
+
+ SecurityCategory-rfc3281{SECURITY-CATEGORY:Supported} ::= SEQUENCE {
+ type [0] IMPLICIT SECURITY-CATEGORY.
+ &id({Supported}),
+ value [1] EXPLICIT SECURITY-CATEGORY.
+ &Type({Supported}{@type})
+ }
+
+ ACClearAttrs ::= SEQUENCE {
+ acIssuer GeneralName,
+ acSerial INTEGER,
+ attrs SEQUENCE OF AttributeSet{{AttributesDefined}}
+ }
+
+ END
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXCMP-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXCMP-2009.asn1
new file mode 100644
index 0000000000..968a142f28
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXCMP-2009.asn1
@@ -0,0 +1,495 @@
+ PKIXCMP-2009
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-cmp2000-02(50) }
+ DEFINITIONS EXPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ AttributeSet{}, Extensions{}, EXTENSION, ATTRIBUTE
+ FROM PKIX-CommonTypes-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57)}
+
+ AlgorithmIdentifier{}, SIGNATURE-ALGORITHM, ALGORITHM,
+ DIGEST-ALGORITHM, MAC-ALGORITHM
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ Certificate, CertificateList
+ FROM PKIX1Explicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-explicit-02(51)}
+
+ GeneralName, KeyIdentifier
+ FROM PKIX1Implicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-implicit-02(59)}
+
+ CertTemplate, PKIPublicationInfo, EncryptedValue, CertId,
+ CertReqMessages
+ FROM PKIXCRMF-2009
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-crmf2005-02(55) }
+ -- see also the behavioral clarifications to CRMF codified in
+ -- Appendix C of this specification
+
+ CertificationRequest
+ FROM PKCS-10
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkcs10-2009(69)}
+ -- (specified in RFC 2986 with 1993 ASN.1 syntax and IMPLICIT
+ -- tags). Alternatively, implementers may directly include
+ -- the [PKCS10] syntax in this module
+ ;
+
+ -- the rest of the module contains locally defined OIDs and
+ -- constructs
+
+ CMPCertificate ::= CHOICE { x509v3PKCert Certificate, ... }
+ -- This syntax, while bits-on-the-wire compatible with the
+ -- standard X.509 definition of "Certificate", allows the
+ -- possibility of future certificate types (such as X.509
+ -- attribute certificates, WAP WTLS certificates, or other kinds
+ -- of certificates) within this certificate management protocol,
+ -- should a need ever arise to support such generality. Those
+ -- implementations that do not foresee a need to ever support
+ -- other certificate types MAY, if they wish, comment out the
+ -- above structure and "uncomment" the following one prior to
+ -- compiling this ASN.1 module. (Note that interoperability
+ -- with implementations that don't do this will be unaffected by
+ -- this change.)
+
+ -- CMPCertificate ::= Certificate
+
+ PKIMessage ::= SEQUENCE {
+ header PKIHeader,
+ body PKIBody,
+ protection [0] PKIProtection OPTIONAL,
+ extraCerts [1] SEQUENCE SIZE (1..MAX) OF CMPCertificate
+ OPTIONAL }
+
+ PKIMessages ::= SEQUENCE SIZE (1..MAX) OF PKIMessage
+
+ PKIHeader ::= SEQUENCE {
+ pvno INTEGER { cmp1999(1), cmp2000(2) },
+ sender GeneralName,
+ -- identifies the sender
+ recipient GeneralName,
+ -- identifies the intended recipient
+ messageTime [0] GeneralizedTime OPTIONAL,
+ -- time of production of this message (used when sender
+ -- believes that the transport will be "suitable"; i.e.,
+ -- that the time will still be meaningful upon receipt)
+ protectionAlg [1] AlgorithmIdentifier{ALGORITHM, {...}}
+ OPTIONAL,
+ -- algorithm used for calculation of protection bits
+ senderKID [2] KeyIdentifier OPTIONAL,
+ recipKID [3] KeyIdentifier OPTIONAL,
+ -- to identify specific keys used for protection
+ transactionID [4] OCTET STRING OPTIONAL,
+ -- identifies the transaction; i.e., this will be the same in
+ -- corresponding request, response, certConf, and PKIConf
+ -- messages
+ senderNonce [5] OCTET STRING OPTIONAL,
+ recipNonce [6] OCTET STRING OPTIONAL,
+ -- nonces used to provide replay protection, senderNonce
+ -- is inserted by the creator of this message; recipNonce
+ -- is a nonce previously inserted in a related message by
+ -- the intended recipient of this message
+ freeText [7] PKIFreeText OPTIONAL,
+ -- this may be used to indicate context-specific instructions
+ -- (this field is intended for human consumption)
+ generalInfo [8] SEQUENCE SIZE (1..MAX) OF
+ InfoTypeAndValue OPTIONAL
+ -- this may be used to convey context-specific information
+ -- (this field not primarily intended for human consumption)
+ }
+
+ PKIFreeText ::= SEQUENCE SIZE (1..MAX) OF UTF8String
+ -- text encoded as UTF-8 String [RFC3629] (note: each
+ -- UTF8String MAY include an [RFC3066] language tag
+ -- to indicate the language of the contained text;
+ -- see [RFC2482] for details)
+
+ PKIBody ::= CHOICE { -- message-specific body elements
+ ir [0] CertReqMessages, --Initialization Request
+ ip [1] CertRepMessage, --Initialization Response
+ cr [2] CertReqMessages, --Certification Request
+ cp [3] CertRepMessage, --Certification Response
+ p10cr [4] CertificationRequest, --imported from [PKCS10]
+ popdecc [5] POPODecKeyChallContent, --pop Challenge
+ popdecr [6] POPODecKeyRespContent, --pop Response
+ kur [7] CertReqMessages, --Key Update Request
+ kup [8] CertRepMessage, --Key Update Response
+ krr [9] CertReqMessages, --Key Recovery Request
+ krp [10] KeyRecRepContent, --Key Recovery Response
+ rr [11] RevReqContent, --Revocation Request
+ rp [12] RevRepContent, --Revocation Response
+ ccr [13] CertReqMessages, --Cross-Cert. Request
+ ccp [14] CertRepMessage, --Cross-Cert. Response
+ ckuann [15] CAKeyUpdAnnContent, --CA Key Update Ann.
+ cann [16] CertAnnContent, --Certificate Ann.
+ rann [17] RevAnnContent, --Revocation Ann.
+ crlann [18] CRLAnnContent, --CRL Announcement
+ pkiconf [19] PKIConfirmContent, --Confirmation
+ nested [20] NestedMessageContent, --Nested Message
+ genm [21] GenMsgContent, --General Message
+ genp [22] GenRepContent, --General Response
+ error [23] ErrorMsgContent, --Error Message
+ certConf [24] CertConfirmContent, --Certificate confirm
+ pollReq [25] PollReqContent, --Polling request
+ pollRep [26] PollRepContent --Polling response
+ }
+
+ PKIProtection ::= BIT STRING
+
+ ProtectedPart ::= SEQUENCE {
+ header PKIHeader,
+ body PKIBody }
+
+ id-PasswordBasedMac OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ usa(840) nt(113533) nsn(7) algorithms(66) 13 }
+ PBMParameter ::= SEQUENCE {
+ salt OCTET STRING,
+ -- note: implementations MAY wish to limit acceptable sizes
+ -- of this string to values appropriate for their environment
+ -- in order to reduce the risk of denial-of-service attacks
+ owf AlgorithmIdentifier{DIGEST-ALGORITHM, {...}},
+ -- AlgId for a One-Way Function (SHA-1 recommended)
+ iterationCount INTEGER,
+ -- number of times the OWF is applied
+ -- note: implementations MAY wish to limit acceptable sizes
+ -- of this integer to values appropriate for their environment
+ -- in order to reduce the risk of denial-of-service attacks
+ mac AlgorithmIdentifier{MAC-ALGORITHM, {...}}
+ -- the MAC AlgId (e.g., DES-MAC, Triple-DES-MAC [PKCS11],
+ -- or HMAC [RFC2104, RFC2202])
+ }
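+
+ -- Editor's note (not part of RFC 5912): as a rough sketch of how
+ -- these fields are believed to be used (per RFC 4210), the MAC key is
+ -- derived by appending the salt to the shared secret and applying owf
+ -- iterationCount times, and the tag is computed over the DER-encoded
+ -- ProtectedPart:
+ --
+ -- K = owf^iterationCount( sharedSecret || salt )
+ -- PKIProtection = mac( K, DER(ProtectedPart) )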
+
+ id-DHBasedMac OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ usa(840) nt(113533) nsn(7) algorithms(66) 30 }
+ DHBMParameter ::= SEQUENCE {
+ owf AlgorithmIdentifier{DIGEST-ALGORITHM, {...}},
+ -- AlgId for a One-Way Function (SHA-1 recommended)
+ mac AlgorithmIdentifier{MAC-ALGORITHM, {...}}
+ -- the MAC AlgId (e.g., DES-MAC, Triple-DES-MAC [PKCS11],
+ -- or HMAC [RFC2104, RFC2202])
+ }
+
+ PKIStatus ::= INTEGER {
+ accepted (0),
+ -- you got exactly what you asked for
+ grantedWithMods (1),
+ -- you got something like what you asked for; the
+ -- requester is responsible for ascertaining the differences
+ rejection (2),
+ -- you don't get it, more information elsewhere in the message
+ waiting (3),
+ -- the request body part has not yet been processed; expect to
+ -- hear more later (note: proper handling of this status
+ -- response MAY use the polling req/rep PKIMessages specified
+ -- in Section 5.3.22; alternatively, polling in the underlying
+ -- transport layer MAY have some utility in this regard)
+ revocationWarning (4),
+ -- this message contains a warning that a revocation is
+ -- imminent
+ revocationNotification (5),
+ -- notification that a revocation has occurred
+ keyUpdateWarning (6)
+ -- update already done for the oldCertId specified in
+ -- CertReqMsg
+ }
+
+ PKIFailureInfo ::= BIT STRING {
+ -- since we can fail in more than one way!
+ -- More codes may be added in the future if/when required.
+ badAlg (0),
+ -- unrecognized or unsupported Algorithm Identifier
+ badMessageCheck (1),
+ -- integrity check failed (e.g., signature did not verify)
+ badRequest (2),
+ -- transaction not permitted or supported
+ badTime (3),
+ -- messageTime was not sufficiently close to the system time,
+ -- as defined by local policy
+ badCertId (4),
+ -- no certificate could be found matching the provided criteria
+ badDataFormat (5),
+ -- the data submitted has the wrong format
+ wrongAuthority (6),
+ -- the authority indicated in the request is different from the
+ -- one creating the response token
+ incorrectData (7),
+ -- the requester's data is incorrect (for notary services)
+ missingTimeStamp (8),
+ -- when the timestamp is missing but should be there
+ -- (by policy)
+ badPOP (9),
+ -- the proof-of-possession failed
+ certRevoked (10),
+ -- the certificate has already been revoked
+ certConfirmed (11),
+ -- the certificate has already been confirmed
+ wrongIntegrity (12),
+ -- invalid integrity, password based instead of signature or
+ -- vice versa
+ badRecipientNonce (13),
+ -- invalid recipient nonce, either missing or wrong value
+ timeNotAvailable (14),
+ -- the TSA's time source is not available
+ unacceptedPolicy (15),
+ -- the requested TSA policy is not supported by the TSA
+ unacceptedExtension (16),
+ -- the requested extension is not supported by the TSA
+ addInfoNotAvailable (17),
+ -- the additional information requested could not be
+ -- understood or is not available
+ badSenderNonce (18),
+ -- invalid sender nonce, either missing or wrong size
+ badCertTemplate (19),
+ -- invalid cert. template or missing mandatory information
+ signerNotTrusted (20),
+ -- signer of the message unknown or not trusted
+ transactionIdInUse (21),
+ -- the transaction identifier is already in use
+ unsupportedVersion (22),
+ -- the version of the message is not supported
+ notAuthorized (23),
+ -- the sender was not authorized to make the preceding
+ -- request or perform the preceding action
+ systemUnavail (24),
+ -- the request cannot be handled due to system unavailability
+ systemFailure (25),
+ -- the request cannot be handled due to system failure
+ duplicateCertReq (26)
+ -- certificate cannot be issued because a duplicate
+ -- certificate already exists
+ }
+
+ PKIStatusInfo ::= SEQUENCE {
+ status PKIStatus,
+ statusString PKIFreeText OPTIONAL,
+ failInfo PKIFailureInfo OPTIONAL }
+
+ OOBCert ::= CMPCertificate
+
+ OOBCertHash ::= SEQUENCE {
+ hashAlg [0] AlgorithmIdentifier{DIGEST-ALGORITHM, {...}}
+ OPTIONAL,
+ certId [1] CertId OPTIONAL,
+ hashVal BIT STRING
+ -- hashVal is calculated over the DER encoding of the
+ -- self-signed certificate with the identifier certID.
+ }
+
+ POPODecKeyChallContent ::= SEQUENCE OF Challenge
+ -- One Challenge per encryption key certification request (in the
+ -- same order as these requests appear in CertReqMessages).
+
+ Challenge ::= SEQUENCE {
+ owf AlgorithmIdentifier{DIGEST-ALGORITHM, {...}}
+ OPTIONAL,
+ -- MUST be present in the first Challenge; MAY be omitted in
+ -- any subsequent Challenge in POPODecKeyChallContent (if
+ -- omitted, then the owf used in the immediately preceding
+ -- Challenge is to be used).
+ witness OCTET STRING,
+ -- the result of applying the one-way function (owf) to a
+ -- randomly-generated INTEGER, A. [Note that a different
+ -- INTEGER MUST be used for each Challenge.]
+ challenge OCTET STRING
+ -- the encryption (under the public key for which the cert.
+ -- request is being made) of Rand, where Rand is specified as
+ -- Rand ::= SEQUENCE {
+ -- int INTEGER,
+ -- - the randomly-generated INTEGER A (above)
+ -- sender GeneralName
+ -- - the sender's name (as included in PKIHeader)
+ -- }
+ }
+
+ POPODecKeyRespContent ::= SEQUENCE OF INTEGER
+ -- One INTEGER per encryption key certification request (in the
+ -- same order as these requests appear in CertReqMessages). The
+ -- retrieved INTEGER A (above) is returned to the sender of the
+ -- corresponding Challenge.
+
+ CertRepMessage ::= SEQUENCE {
+ caPubs [1] SEQUENCE SIZE (1..MAX) OF CMPCertificate
+ OPTIONAL,
+ response SEQUENCE OF CertResponse }
+
+ CertResponse ::= SEQUENCE {
+ certReqId INTEGER,
+ -- to match this response with the corresponding request (a value
+ -- of -1 is to be used if certReqId is not specified in the
+ -- corresponding request)
+ status PKIStatusInfo,
+ certifiedKeyPair CertifiedKeyPair OPTIONAL,
+ rspInfo OCTET STRING OPTIONAL
+ -- analogous to the id-regInfo-utf8Pairs string defined
+ -- for regInfo in CertReqMsg [RFC4211]
+ }
+
+ CertifiedKeyPair ::= SEQUENCE {
+ certOrEncCert CertOrEncCert,
+ privateKey [0] EncryptedValue OPTIONAL,
+ -- see [RFC4211] for comment on encoding
+ publicationInfo [1] PKIPublicationInfo OPTIONAL }
+
+ CertOrEncCert ::= CHOICE {
+ certificate [0] CMPCertificate,
+ encryptedCert [1] EncryptedValue }
+ KeyRecRepContent ::= SEQUENCE {
+ status PKIStatusInfo,
+ newSigCert [0] CMPCertificate OPTIONAL,
+ caCerts [1] SEQUENCE SIZE (1..MAX) OF
+ CMPCertificate OPTIONAL,
+ keyPairHist [2] SEQUENCE SIZE (1..MAX) OF
+ CertifiedKeyPair OPTIONAL }
+
+ RevReqContent ::= SEQUENCE OF RevDetails
+
+ RevDetails ::= SEQUENCE {
+ certDetails CertTemplate,
+ -- allows requester to specify as much as they can about
+ -- the cert. for which revocation is requested
+ -- (e.g., for cases in which serialNumber is not available)
+ crlEntryDetails Extensions{{...}} OPTIONAL
+ -- requested crlEntryExtensions
+ }
+
+ RevRepContent ::= SEQUENCE {
+ status SEQUENCE SIZE (1..MAX) OF PKIStatusInfo,
+ -- in same order as was sent in RevReqContent
+ revCerts [0] SEQUENCE SIZE (1..MAX) OF CertId OPTIONAL,
+ -- IDs for which revocation was requested
+ -- (same order as status)
+ crls [1] SEQUENCE SIZE (1..MAX) OF CertificateList OPTIONAL
+ -- the resulting CRLs (there may be more than one)
+ }
+
+ CAKeyUpdAnnContent ::= SEQUENCE {
+ oldWithNew CMPCertificate, -- old pub signed with new priv
+ newWithOld CMPCertificate, -- new pub signed with old priv
+ newWithNew CMPCertificate -- new pub signed with new priv
+ }
+
+ CertAnnContent ::= CMPCertificate
+
+ RevAnnContent ::= SEQUENCE {
+ status PKIStatus,
+ certId CertId,
+ willBeRevokedAt GeneralizedTime,
+ badSinceDate GeneralizedTime,
+ crlDetails Extensions{{...}} OPTIONAL
+ -- extra CRL details (e.g., crl number, reason, location, etc.)
+ }
+
+ CRLAnnContent ::= SEQUENCE OF CertificateList
+ PKIConfirmContent ::= NULL
+
+ NestedMessageContent ::= PKIMessages
+
+ INFO-TYPE-AND-VALUE ::= TYPE-IDENTIFIER
+
+ InfoTypeAndValue ::= SEQUENCE {
+ infoType INFO-TYPE-AND-VALUE.
+ &id({SupportedInfoSet}),
+ infoValue INFO-TYPE-AND-VALUE.
+ &Type({SupportedInfoSet}{@infoType}) }
+
+ SupportedInfoSet INFO-TYPE-AND-VALUE ::= { ... }
+
+ -- Example InfoTypeAndValue contents include, but are not limited
+ -- to, the following (uncomment in this ASN.1 module and use as
+ -- appropriate for a given environment):
+ --
+ -- id-it-caProtEncCert OBJECT IDENTIFIER ::= {id-it 1}
+ -- CAProtEncCertValue ::= CMPCertificate
+ -- id-it-signKeyPairTypes OBJECT IDENTIFIER ::= {id-it 2}
+ -- SignKeyPairTypesValue ::= SEQUENCE OF
+ -- AlgorithmIdentifier{{...}}
+ -- id-it-encKeyPairTypes OBJECT IDENTIFIER ::= {id-it 3}
+ -- EncKeyPairTypesValue ::= SEQUENCE OF
+ -- AlgorithmIdentifier{{...}}
+ -- id-it-preferredSymmAlg OBJECT IDENTIFIER ::= {id-it 4}
+ -- PreferredSymmAlgValue ::= AlgorithmIdentifier{{...}}
+ -- id-it-caKeyUpdateInfo OBJECT IDENTIFIER ::= {id-it 5}
+ -- CAKeyUpdateInfoValue ::= CAKeyUpdAnnContent
+ -- id-it-currentCRL OBJECT IDENTIFIER ::= {id-it 6}
+ -- CurrentCRLValue ::= CertificateList
+ -- id-it-unsupportedOIDs OBJECT IDENTIFIER ::= {id-it 7}
+ -- UnsupportedOIDsValue ::= SEQUENCE OF OBJECT IDENTIFIER
+ -- id-it-keyPairParamReq OBJECT IDENTIFIER ::= {id-it 10}
+ -- KeyPairParamReqValue ::= OBJECT IDENTIFIER
+ -- id-it-keyPairParamRep OBJECT IDENTIFIER ::= {id-it 11}
+ -- KeyPairParamRepValue ::= AlgorithmIdentifer
+ -- id-it-revPassphrase OBJECT IDENTIFIER ::= {id-it 12}
+ -- RevPassphraseValue ::= EncryptedValue
+ -- id-it-implicitConfirm OBJECT IDENTIFIER ::= {id-it 13}
+ -- ImplicitConfirmValue ::= NULL
+ -- id-it-confirmWaitTime OBJECT IDENTIFIER ::= {id-it 14}
+ -- ConfirmWaitTimeValue ::= GeneralizedTime
+ -- id-it-origPKIMessage OBJECT IDENTIFIER ::= {id-it 15}
+ -- OrigPKIMessageValue ::= PKIMessages
+ -- id-it-suppLangTags OBJECT IDENTIFIER ::= {id-it 16}
+ -- SuppLangTagsValue ::= SEQUENCE OF UTF8String
+ --
+ -- where
+ --
+ -- id-pkix OBJECT IDENTIFIER ::= {
+ -- iso(1) identified-organization(3)
+ -- dod(6) internet(1) security(5) mechanisms(5) pkix(7)}
+ -- and
+ -- id-it OBJECT IDENTIFIER ::= {id-pkix 4}
+ --
+ --
+ -- This construct MAY also be used to define new PKIX Certificate
+ -- Management Protocol request and response messages, or general-
+ -- purpose (e.g., announcement) messages for future needs or for
+ -- specific environments.
+
+ GenMsgContent ::= SEQUENCE OF InfoTypeAndValue
+
+ -- May be sent by EE, RA, or CA (depending on message content).
+ -- The OPTIONAL infoValue parameter of InfoTypeAndValue will
+ -- typically be omitted for some of the examples given above.
+ -- The receiver is free to ignore any contained OBJECT IDs that it
+ -- does not recognize. If sent from EE to CA, the empty set
+ -- indicates that the CA may send
+ -- any/all information that it wishes.
+
+ GenRepContent ::= SEQUENCE OF InfoTypeAndValue
+ -- Receiver MAY ignore any contained OIDs that it does not
+ -- recognize.
+
+ ErrorMsgContent ::= SEQUENCE {
+ pKIStatusInfo PKIStatusInfo,
+ errorCode INTEGER OPTIONAL,
+ -- implementation-specific error codes
+ errorDetails PKIFreeText OPTIONAL
+ -- implementation-specific error details
+ }
+
+ CertConfirmContent ::= SEQUENCE OF CertStatus
+
+ CertStatus ::= SEQUENCE {
+ certHash OCTET STRING,
+ -- the hash of the certificate, using the same hash algorithm
+ -- as is used to create and verify the certificate signature
+ certReqId INTEGER,
+ -- to match this confirmation with the corresponding req/rep
+ statusInfo PKIStatusInfo OPTIONAL }
+
+ PollReqContent ::= SEQUENCE OF SEQUENCE {
+ certReqId INTEGER }
+
+ PollRepContent ::= SEQUENCE OF SEQUENCE {
+ certReqId INTEGER,
+ checkAfter INTEGER, -- time in seconds
+ reason PKIFreeText OPTIONAL }
+
+ END
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXCRMF-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXCRMF-2009.asn1
new file mode 100644
index 0000000000..1c0b780499
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXCRMF-2009.asn1
@@ -0,0 +1,409 @@
+ PKIXCRMF-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-crmf2005-02(55)}
+ DEFINITIONS IMPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ AttributeSet{}, Extensions{}, EXTENSION, ATTRIBUTE,
+ SingleAttribute{}
+ FROM PKIX-CommonTypes-2009
+ {iso(1) identified-organization(3) dod(6) internet(1)
+ security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkixCommon-02(57) }
+
+ AlgorithmIdentifier{}, SIGNATURE-ALGORITHM, ALGORITHM,
+ DIGEST-ALGORITHM, MAC-ALGORITHM, PUBLIC-KEY
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ Version, Name, Time, SubjectPublicKeyInfo, UniqueIdentifier, id-pkix,
+ SignatureAlgorithms
+ FROM PKIX1Explicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-explicit-02(51)}
+
+ GeneralName, CertExtensions
+ FROM PKIX1Implicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-implicit-02(59)}
+
+ EnvelopedData, CONTENT-TYPE
+ FROM CryptographicMessageSyntax-2009
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-cms-2004-02(41)}
+ maca-hMAC-SHA1
+ FROM CryptographicMessageSyntaxAlgorithms-2009
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-cmsalg-2001-02(37) }
+
+ mda-sha1
+ FROM PKIXAlgs-2009
+ { iso(1) identified-organization(3) dod(6)
+ internet(1) security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-algorithms2008-02(56) } ;
+
+ -- arc for Internet X.509 PKI protocols and their components
+
+ id-pkip OBJECT IDENTIFIER ::= { id-pkix 5 }
+
+ id-smime OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs9(9) 16 }
+
+ id-ct OBJECT IDENTIFIER ::= { id-smime 1 } -- content types
+
+ -- Core definitions for this module
+
+ CertReqMessages ::= SEQUENCE SIZE (1..MAX) OF CertReqMsg
+
+ CertReqMsg ::= SEQUENCE {
+ certReq CertRequest,
+ popo ProofOfPossession OPTIONAL,
+ -- content depends upon key type
+ regInfo SEQUENCE SIZE(1..MAX) OF
+ SingleAttribute{{RegInfoSet}} OPTIONAL }
+
+ CertRequest ::= SEQUENCE {
+ certReqId INTEGER,
+ -- ID for matching request and reply
+ certTemplate CertTemplate,
+ -- Selected fields of cert to be issued
+ controls Controls OPTIONAL }
+ -- Attributes affecting issuance
+
+ CertTemplate ::= SEQUENCE {
+ version [0] Version OPTIONAL,
+ serialNumber [1] INTEGER OPTIONAL,
+ signingAlg [2] AlgorithmIdentifier{SIGNATURE-ALGORITHM,
+ {SignatureAlgorithms}} OPTIONAL,
+ issuer [3] Name OPTIONAL,
+ validity [4] OptionalValidity OPTIONAL,
+ subject [5] Name OPTIONAL,
+ publicKey [6] SubjectPublicKeyInfo OPTIONAL,
+ issuerUID [7] UniqueIdentifier OPTIONAL,
+ subjectUID [8] UniqueIdentifier OPTIONAL,
+ extensions [9] Extensions{{CertExtensions}} OPTIONAL }
+
+ OptionalValidity ::= SEQUENCE {
+ notBefore [0] Time OPTIONAL,
+ notAfter [1] Time OPTIONAL } -- at least one MUST be present
+
+ Controls ::= SEQUENCE SIZE(1..MAX) OF SingleAttribute
+ {{RegControlSet}}
+
+ ProofOfPossession ::= CHOICE {
+ raVerified [0] NULL,
+ -- used if the RA has already verified that the requester is in
+ -- possession of the private key
+ signature [1] POPOSigningKey,
+ keyEncipherment [2] POPOPrivKey,
+ keyAgreement [3] POPOPrivKey }
+
+ POPOSigningKey ::= SEQUENCE {
+ poposkInput [0] POPOSigningKeyInput OPTIONAL,
+ algorithmIdentifier AlgorithmIdentifier{SIGNATURE-ALGORITHM,
+ {SignatureAlgorithms}},
+ signature BIT STRING }
+ -- The signature (using "algorithmIdentifier") is on the
+ -- DER-encoded value of poposkInput. NOTE: If the CertReqMsg
+ -- certReq CertTemplate contains the subject and publicKey values,
+ -- then poposkInput MUST be omitted and the signature MUST be
+ -- computed over the DER-encoded value of CertReqMsg certReq. If
+ -- the CertReqMsg certReq CertTemplate does not contain both the
+ -- public key and subject values (i.e., if it contains only one
+ -- of these, or neither), then poposkInput MUST be present and
+ -- MUST be signed.
+
+ POPOSigningKeyInput ::= SEQUENCE {
+ authInfo CHOICE {
+ sender [0] GeneralName,
+ -- used only if an authenticated identity has been
+ -- established for the sender (e.g., a DN from a
+ -- previously-issued and currently-valid certificate)
+ publicKeyMAC PKMACValue },
+ -- used if no authenticated GeneralName currently exists for
+ -- the sender; publicKeyMAC contains a password-based MAC
+ -- on the DER-encoded value of publicKey
+ publicKey SubjectPublicKeyInfo } -- from CertTemplate
+
+ PKMACValue ::= SEQUENCE {
+ algId AlgorithmIdentifier{MAC-ALGORITHM,
+ {Password-MACAlgorithms}},
+ value BIT STRING }
+
+ --
+ -- Define the currently only acceptable MAC algorithm to be used
+ -- for the PKMACValue structure
+ --
+
+ id-PasswordBasedMac OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ usa(840) nt(113533) nsn(7) algorithms(66) 13 }
+
+ Password-MACAlgorithms MAC-ALGORITHM ::= {
+ {IDENTIFIER id-PasswordBasedMac
+ PARAMS TYPE PBMParameter ARE required
+ IS-KEYED-MAC TRUE
+ }, ...
+ }
+
+ PBMParameter ::= SEQUENCE {
+ salt OCTET STRING,
+ owf AlgorithmIdentifier{DIGEST-ALGORITHM,
+ {DigestAlgorithms}},
+ -- AlgId for a One-Way Function (SHA-1 recommended)
+ iterationCount INTEGER,
+ -- number of times the OWF is applied
+ mac AlgorithmIdentifier{MAC-ALGORITHM,
+ {MACAlgorithms}}
+ -- the MAC AlgId (e.g., DES-MAC, Triple-DES-MAC, or HMAC
+ }
+
+ DigestAlgorithms DIGEST-ALGORITHM ::= {
+ mda-sha1, ...
+ }
+
+ MACAlgorithms MAC-ALGORITHM ::= {
+ -- The modules containing the ASN.1 for the DES and 3DES MAC
+ -- algorithms have not been updated at the time that this is
+ -- being published. Users of this module should define the
+ -- appropriate MAC-ALGORITHM objects and uncomment the
+ -- following lines if they support these MAC algorithms.
+ -- maca-des-mac | maca-3des-mac --
+ maca-hMAC-SHA1,
+ ...
+ }
+
+ POPOPrivKey ::= CHOICE {
+ thisMessage [0] BIT STRING, -- Deprecated
+ -- possession is proven in this message (which contains
+ -- the private key itself (encrypted for the CA))
+ subsequentMessage [1] SubsequentMessage,
+ -- possession will be proven in a subsequent message
+ dhMAC [2] BIT STRING, -- Deprecated
+ agreeMAC [3] PKMACValue,
+ encryptedKey [4] EnvelopedData }
+ -- for keyAgreement (only), possession is proven in this message
+ -- (which contains a MAC (over the DER-encoded value of the
+ -- certReq parameter in CertReqMsg, which MUST include both
+ -- subject and publicKey) based on a key derived from the end
+ -- entity's private DH key and the CA's public DH key);
+
+ SubsequentMessage ::= INTEGER {
+ encrCert (0),
+ -- requests that resulting certificate be encrypted for the
+ -- end entity (following which, POP will be proven in a
+ -- confirmation message)
+ challengeResp (1) }
+ -- requests that CA engage in challenge-response exchange with
+ -- end entity in order to prove private key possession
+
+ --
+ -- id-ct-encKeyWithID content type used as the content type for the
+ -- EnvelopedData in POPOPrivKey.
+ -- It contains both a private key and an identifier for key escrow
+ -- agents to check against recovery requestors.
+ --
+
+ ct-encKeyWithID CONTENT-TYPE ::=
+ { EncKeyWithID IDENTIFIED BY id-ct-encKeyWithID }
+
+ id-ct-encKeyWithID OBJECT IDENTIFIER ::= {id-ct 21}
+
+ EncKeyWithID ::= SEQUENCE {
+ privateKey PrivateKeyInfo,
+ identifier CHOICE {
+ string UTF8String,
+ generalName GeneralName
+ } OPTIONAL
+ }
+
+ PrivateKeyInfo ::= SEQUENCE {
+ version INTEGER,
+ privateKeyAlgorithm AlgorithmIdentifier{PUBLIC-KEY, {...}},
+ privateKey OCTET STRING,
+ -- Structure of public key is in PUBLIC-KEY.&PrivateKey
+ attributes [0] IMPLICIT Attributes OPTIONAL
+ }
+
+ Attributes ::= SET OF AttributeSet{{PrivateKeyAttributes}}
+ PrivateKeyAttributes ATTRIBUTE ::= {...}
+
+ --
+ -- 6. Registration Controls in CRMF
+ --
+
+ id-regCtrl OBJECT IDENTIFIER ::= { id-pkip 1 }
+
+ RegControlSet ATTRIBUTE ::= {
+ regCtrl-regToken | regCtrl-authenticator |
+ regCtrl-pkiPublicationInfo | regCtrl-pkiArchiveOptions |
+ regCtrl-oldCertID | regCtrl-protocolEncrKey, ... }
+
+ --
+ -- 6.1. Registration Token Control
+ --
+
+ regCtrl-regToken ATTRIBUTE ::=
+ { TYPE RegToken IDENTIFIED BY id-regCtrl-regToken }
+
+ id-regCtrl-regToken OBJECT IDENTIFIER ::= { id-regCtrl 1 }
+
+ RegToken ::= UTF8String
+
+ --
+ -- 6.2. Authenticator Control
+ --
+
+ regCtrl-authenticator ATTRIBUTE ::=
+ { TYPE Authenticator IDENTIFIED BY id-regCtrl-authenticator }
+
+ id-regCtrl-authenticator OBJECT IDENTIFIER ::= { id-regCtrl 2 }
+
+ Authenticator ::= UTF8String
+
+ --
+ -- 6.3. Publication Information Control
+ --
+
+ regCtrl-pkiPublicationInfo ATTRIBUTE ::=
+ { TYPE PKIPublicationInfo IDENTIFIED BY
+ id-regCtrl-pkiPublicationInfo }
+
+ id-regCtrl-pkiPublicationInfo OBJECT IDENTIFIER ::= { id-regCtrl 3 }
+
+ PKIPublicationInfo ::= SEQUENCE {
+ action INTEGER {
+ dontPublish (0),
+ pleasePublish (1) },
+ pubInfos SEQUENCE SIZE (1..MAX) OF SinglePubInfo OPTIONAL }
+ -- pubInfos MUST NOT be present if action is "dontPublish"
+ -- (if action is "pleasePublish" and pubInfos is omitted,
+ -- "dontCare" is assumed)
+
+ SinglePubInfo ::= SEQUENCE {
+ pubMethod INTEGER {
+ dontCare (0),
+ x500 (1),
+ web (2),
+ ldap (3) },
+ pubLocation GeneralName OPTIONAL }
+
+ --
+ -- 6.4. Archive Options Control
+ --
+
+ regCtrl-pkiArchiveOptions ATTRIBUTE ::=
+ { TYPE PKIArchiveOptions IDENTIFIED BY
+ id-regCtrl-pkiArchiveOptions }
+
+ id-regCtrl-pkiArchiveOptions OBJECT IDENTIFIER ::= { id-regCtrl 4 }
+
+ PKIArchiveOptions ::= CHOICE {
+ encryptedPrivKey [0] EncryptedKey,
+ -- the actual value of the private key
+ keyGenParameters [1] KeyGenParameters,
+ -- parameters that allow the private key to be re-generated
+ archiveRemGenPrivKey [2] BOOLEAN }
+ -- set to TRUE if sender wishes receiver to archive the private
+ -- key of a key pair that the receiver generates in response to
+ -- this request; set to FALSE if no archive is desired.
+
+ EncryptedKey ::= CHOICE {
+ encryptedValue EncryptedValue, -- Deprecated
+ envelopedData [0] EnvelopedData }
+ -- The encrypted private key MUST be placed in the envelopedData
+ -- encryptedContentInfo encryptedContent OCTET STRING.
+
+ --
+ -- We skipped doing the full constraints here since this structure
+ -- has been deprecated in favor of EnvelopedData
+ --
+
+ EncryptedValue ::= SEQUENCE {
+ intendedAlg [0] AlgorithmIdentifier{ALGORITHM, {...}} OPTIONAL,
+ -- the intended algorithm for which the value will be used
+ symmAlg [1] AlgorithmIdentifier{ALGORITHM, {...}} OPTIONAL,
+ -- the symmetric algorithm used to encrypt the value
+ encSymmKey [2] BIT STRING OPTIONAL,
+ -- the (encrypted) symmetric key used to encrypt the value
+ keyAlg [3] AlgorithmIdentifier{ALGORITHM, {...}} OPTIONAL,
+ -- algorithm used to encrypt the symmetric key
+ valueHint [4] OCTET STRING OPTIONAL,
+ -- a brief description or identifier of the encValue content
+ -- (may be meaningful only to the sending entity, and used only
+ -- if EncryptedValue might be re-examined by the sending entity
+ -- in the future)
+ encValue BIT STRING }
+ -- the encrypted value itself
+ -- When EncryptedValue is used to carry a private key (as opposed to
+ -- a certificate), implementations MUST support the encValue field
+ -- containing an encrypted PrivateKeyInfo as defined in [PKCS11],
+ -- section 12.11. If encValue contains some other format/encoding
+ -- for the private key, the first octet of valueHint MAY be used
+ -- to indicate the format/encoding (but note that the possible values
+ -- of this octet are not specified at this time). In all cases, the
+ -- intendedAlg field MUST be used to indicate at least the OID of
+ -- the intended algorithm of the private key, unless this information
+ -- is known a priori to both sender and receiver by some other means.
+
+ KeyGenParameters ::= OCTET STRING
+
+ --
+ -- 6.5. OldCert ID Control
+ --
+
+ regCtrl-oldCertID ATTRIBUTE ::=
+ { TYPE OldCertId IDENTIFIED BY id-regCtrl-oldCertID }
+
+ id-regCtrl-oldCertID OBJECT IDENTIFIER ::= { id-regCtrl 5 }
+
+ OldCertId ::= CertId
+
+ CertId ::= SEQUENCE {
+ issuer GeneralName,
+ serialNumber INTEGER }
+
+ --
+ -- 6.6. Protocol Encryption Key Control
+ --
+
+ regCtrl-protocolEncrKey ATTRIBUTE ::=
+ { TYPE ProtocolEncrKey IDENTIFIED BY id-regCtrl-protocolEncrKey }
+ id-regCtrl-protocolEncrKey OBJECT IDENTIFIER ::= { id-regCtrl 6 }
+
+ ProtocolEncrKey ::= SubjectPublicKeyInfo
+
+ --
+ -- 7. Registration Info in CRMF
+ --
+
+ id-regInfo OBJECT IDENTIFIER ::= { id-pkip 2 }
+
+ RegInfoSet ATTRIBUTE ::=
+ { regInfo-utf8Pairs | regInfo-certReq }
+
+ --
+ -- 7.1. utf8Pairs RegInfo Control
+ --
+
+ regInfo-utf8Pairs ATTRIBUTE ::=
+ { TYPE UTF8Pairs IDENTIFIED BY id-regInfo-utf8Pairs }
+
+ id-regInfo-utf8Pairs OBJECT IDENTIFIER ::= { id-regInfo 1 }
+ --with syntax
+ UTF8Pairs ::= UTF8String
+
+ --
+ -- 7.2. certReq RegInfo Control
+ --
+
+ regInfo-certReq ATTRIBUTE ::=
+ { TYPE CertReq IDENTIFIED BY id-regInfo-certReq }
+
+ id-regInfo-certReq OBJECT IDENTIFIER ::= { id-regInfo 2 }
+ --with syntax
+ CertReq ::= CertRequest
+
+ END
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Protected-Part-Descriptors.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Protected-Part-Descriptors.asn1
index 5512f1590b..5512f1590b 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Protected-Part-Descriptors.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Protected-Part-Descriptors.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/ProtocolObjectIdentifiers.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/ProtocolObjectIdentifiers.asn1
index d6e88a2e47..d6e88a2e47 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/ProtocolObjectIdentifiers.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/ProtocolObjectIdentifiers.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Raster-Gr-Coding-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Raster-Gr-Coding-Attributes.asn1
index 258c5f0b23..258c5f0b23 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Raster-Gr-Coding-Attributes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Raster-Gr-Coding-Attributes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Raster-Gr-Presentation-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Raster-Gr-Presentation-Attributes.asn1
index c8f3a2ff33..c8f3a2ff33 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Raster-Gr-Presentation-Attributes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Raster-Gr-Presentation-Attributes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Raster-Gr-Profile-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Raster-Gr-Profile-Attributes.asn1
index 365144ff35..365144ff35 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Raster-Gr-Profile-Attributes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Raster-Gr-Profile-Attributes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Reliable-Transfer-APDU.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Reliable-Transfer-APDU.asn1
index d00570b7e7..d00570b7e7 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Reliable-Transfer-APDU.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Reliable-Transfer-APDU.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Abstract-Syntaxes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Abstract-Syntaxes.asn1
index 4a59cc403b..4a59cc403b 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Abstract-Syntaxes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Abstract-Syntaxes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Generic-ROS-PDUs.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Generic-ROS-PDUs.asn1
index e55ea3c05e..e55ea3c05e 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Generic-ROS-PDUs.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Generic-ROS-PDUs.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Information-Objects-extensions.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Information-Objects-extensions.asn1
index 671cf0e780..671cf0e780 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Information-Objects-extensions.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Information-Objects-extensions.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Information-Objects.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Information-Objects.asn1
index b497e4126b..b497e4126b 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Information-Objects.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Information-Objects.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Realizations.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Realizations.asn1
index 73b49c8d7a..73b49c8d7a 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Realizations.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Realizations.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Useful-Definitions.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Useful-Definitions.asn1
index e526ff4600..e526ff4600 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Useful-Definitions.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Useful-Definitions.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/SCVP-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/SCVP-2009.asn1
new file mode 100644
index 0000000000..f74f76ff7c
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/SCVP-2009.asn1
@@ -0,0 +1,608 @@
+ SCVP-2009
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-scvp-02(52) }
+ DEFINITIONS IMPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ Extensions{}, EXTENSION, ATTRIBUTE
+ FROM PKIX-CommonTypes-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57) }
+
+ AlgorithmIdentifier{}, SIGNATURE-ALGORITHM, PUBLIC-KEY, KEY-AGREE,
+ DIGEST-ALGORITHM, KEY-DERIVATION, MAC-ALGORITHM
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ Certificate, CertificateList, CertificateSerialNumber,
+ SignatureAlgorithms, SubjectPublicKeyInfo
+ FROM PKIX1Explicit-2009
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-explicit-02(51) }
+
+ GeneralNames, GeneralName, KeyUsage, KeyPurposeId
+ FROM PKIX1Implicit-2009
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-implicit-02(59) }
+
+ AttributeCertificate
+ FROM PKIXAttributeCertificate-2009
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-attribute-cert-02(47) }
+
+ OCSPResponse
+ FROM OCSP-2009
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-ocsp-02(48) }
+
+ ContentInfo, CONTENT-TYPE
+ FROM CryptographicMessageSyntax-2009
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-cms-2004-02(41) }
+
+ mda-sha1
+ FROM PKIXAlgs-2009
+ { iso(1) identified-organization(3) dod(6)
+ internet(1) security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-algorithms2008-02(56) } ;
+
+ ContentTypes CONTENT-TYPE ::= {ct-scvp-certValRequest |
+ ct-scvp-certValResponse | ct-scvp-valPolRequest |
+ ct-scvp-valPolResponse, ... }
+
+ id-ct OBJECT IDENTIFIER ::=
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs9(9)
+ id-smime(16) 1 }
+
+ ct-scvp-certValRequest CONTENT-TYPE ::=
+ { CVRequest IDENTIFIED BY id-ct-scvp-certValRequest }
+
+ id-ct-scvp-certValRequest OBJECT IDENTIFIER ::= { id-ct 10 }
+
+ -- SCVP Certificate Validation Request
+
+ CVRequest ::= SEQUENCE {
+ cvRequestVersion INTEGER DEFAULT 1,
+ query Query,
+ requestorRef [0] GeneralNames OPTIONAL,
+ requestNonce [1] OCTET STRING OPTIONAL,
+ requestorName [2] GeneralName OPTIONAL,
+ responderName [3] GeneralName OPTIONAL,
+ requestExtensions [4] Extensions{{RequestExtensions}}
+ OPTIONAL,
+ signatureAlg [5] AlgorithmIdentifier
+ {SIGNATURE-ALGORITHM,
+ {SignatureAlgorithms}}
+ OPTIONAL,
+ hashAlg [6] OBJECT IDENTIFIER OPTIONAL,
+ requestorText [7] UTF8String (SIZE (1..256)) OPTIONAL
+ }
+
+ -- Set of signature algorithms is coming from RFC 5280
+ -- SignatureAlgorithms SIGNATURE-ALGORITHM ::= {...}
+
+ -- Add supported request extensions here; all new items should
+ -- be added after the extension marker
+
+ RequestExtensions EXTENSION ::= {...}
+
+ Query ::= SEQUENCE {
+ queriedCerts CertReferences,
+ checks CertChecks,
+ wantBack [1] WantBack OPTIONAL,
+ validationPolicy ValidationPolicy,
+ responseFlags ResponseFlags OPTIONAL,
+ serverContextInfo [2] OCTET STRING OPTIONAL,
+ validationTime [3] GeneralizedTime OPTIONAL,
+ intermediateCerts [4] CertBundle OPTIONAL,
+ revInfos [5] RevocationInfos OPTIONAL,
+ producedAt [6] GeneralizedTime OPTIONAL,
+ queryExtensions [7] Extensions{{QueryExtensions}} OPTIONAL
+ }
+
+ -- Add supported query extensions here; all new items should be added
+ -- after the extension marker
+
+ QueryExtensions EXTENSION ::= {...}
+
+ CertReferences ::= CHOICE {
+ pkcRefs [0] SEQUENCE SIZE (1..MAX) OF PKCReference,
+ acRefs [1] SEQUENCE SIZE (1..MAX) OF ACReference
+ }
+
+ CertReference::= CHOICE {
+ pkc PKCReference,
+ ac ACReference
+ }
+
+ PKCReference ::= CHOICE {
+ cert [0] Certificate,
+ pkcRef [1] SCVPCertID
+ }
+
+ ACReference ::= CHOICE {
+ attrCert [2] AttributeCertificate,
+ acRef [3] SCVPCertID
+ }
+
+ HashAlgorithm ::= AlgorithmIdentifier{DIGEST-ALGORITHM,
+ {mda-sha1, ...}}
+
+ SCVPCertID ::= SEQUENCE {
+ certHash OCTET STRING,
+ issuerSerial SCVPIssuerSerial,
+ hashAlgorithm HashAlgorithm
+ DEFAULT { algorithm mda-sha1.&id }
+ }
+
+ SCVPIssuerSerial ::= SEQUENCE {
+ issuer GeneralNames,
+ serialNumber CertificateSerialNumber
+ }
+
+ ValidationPolicy ::= SEQUENCE {
+ validationPolRef ValidationPolRef,
+ validationAlg [0] ValidationAlg OPTIONAL,
+ userPolicySet [1] SEQUENCE SIZE (1..MAX) OF OBJECT
+ IDENTIFIER OPTIONAL,
+ inhibitPolicyMapping [2] BOOLEAN OPTIONAL,
+ requireExplicitPolicy [3] BOOLEAN OPTIONAL,
+ inhibitAnyPolicy [4] BOOLEAN OPTIONAL,
+ trustAnchors [5] TrustAnchors OPTIONAL,
+ keyUsages [6] SEQUENCE OF KeyUsage OPTIONAL,
+ extendedKeyUsages [7] SEQUENCE OF KeyPurposeId OPTIONAL,
+ specifiedKeyUsages [8] SEQUENCE OF KeyPurposeId OPTIONAL
+ }
+
+ CertChecks ::= SEQUENCE SIZE (1..MAX) OF
+ OBJECT IDENTIFIER (CertCheckSet | ACertCheckSet, ... )
+
+ WantBack ::= SEQUENCE SIZE (1..MAX) OF
+ WANT-BACK.&id ({AllWantBacks})
+
+ POLICY ::= ATTRIBUTE
+
+ ValidationPolRefSet POLICY ::= {
+ svp-defaultValPolicy, ...
+ }
+
+ ValidationPolRef ::= SEQUENCE {
+ valPolId POLICY.&id,
+ valPolParams POLICY.&Type OPTIONAL
+ }
+
+ ValidationAlgSet POLICY ::= {
+ svp-basicValAlg, ...
+ }
+
+ ValidationAlg ::= SEQUENCE {
+ valAlgId POLICY.&id,
+ parameters POLICY.&Type OPTIONAL
+ }
+
+ NameValidationAlgSet POLICY ::= {
+ svp-nameValAlg, ...
+ }
+
+ NameValidationAlgParams ::= SEQUENCE {
+ nameCompAlgId OBJECT IDENTIFIER (NameCompAlgSet, ... ),
+ validationNames GeneralNames
+ }
+
+ TrustAnchors ::= SEQUENCE SIZE (1..MAX) OF PKCReference
+ KeyAgreePublicKey ::= SEQUENCE {
+ algorithm AlgorithmIdentifier{KEY-AGREE,
+ {SupportedKeyAgreePublicKeys}},
+ publicKey BIT STRING,
+ macAlgorithm AlgorithmIdentifier{MAC-ALGORITHM,
+ {SupportedMACAlgorithms}},
+ kDF AlgorithmIdentifier{KEY-DERIVATION,
+ {SupportedKeyDerivationFunctions}}
+ OPTIONAL
+ }
+
+ SupportedKeyAgreePublicKeys KEY-AGREE ::= {...}
+ SupportedMACAlgorithms MAC-ALGORITHM ::= {...}
+ SupportedKeyDerivationFunctions KEY-DERIVATION ::= {...}
+
+ ResponseFlags ::= SEQUENCE {
+ fullRequestInResponse [0] BOOLEAN DEFAULT FALSE,
+ responseValidationPolByRef [1] BOOLEAN DEFAULT TRUE,
+ protectResponse [2] BOOLEAN DEFAULT TRUE,
+ cachedResponse [3] BOOLEAN DEFAULT TRUE
+ }
+
+ CertBundle ::= SEQUENCE SIZE (1..MAX) OF Certificate
+
+ RevocationInfos ::= SEQUENCE SIZE (1..MAX) OF RevocationInfo
+
+ RevocationInfo ::= CHOICE {
+ crl [0] CertificateList,
+ delta-crl [1] CertificateList,
+ ocsp [2] OCSPResponse,
+ other [3] OtherRevInfo
+ }
+
+ REV-INFO ::= TYPE-IDENTIFIER
+
+ OtherRevInfo ::= SEQUENCE {
+ riType REV-INFO.&id,
+ riValue REV-INFO.&Type
+ }
+
+ -- SCVP Certificate Validation Response
+
+ ct-scvp-certValResponse CONTENT-TYPE ::=
+ { CVResponse IDENTIFIED BY id-ct-scvp-certValResponse }
+
+ id-ct-scvp-certValResponse OBJECT IDENTIFIER ::= { id-ct 11 }
+
+ CVResponse ::= SEQUENCE {
+ cvResponseVersion INTEGER,
+ serverConfigurationID INTEGER,
+ producedAt GeneralizedTime,
+ responseStatus ResponseStatus,
+ respValidationPolicy [0] RespValidationPolicy OPTIONAL,
+ requestRef [1] RequestReference OPTIONAL,
+ requestorRef [2] GeneralNames OPTIONAL,
+ requestorName [3] GeneralNames OPTIONAL,
+ replyObjects [4] ReplyObjects OPTIONAL,
+ respNonce [5] OCTET STRING OPTIONAL,
+ serverContextInfo [6] OCTET STRING OPTIONAL,
+ cvResponseExtensions [7] Extensions{{CVResponseExtensions}}
+ OPTIONAL,
+ requestorText [8] UTF8String (SIZE (1..256)) OPTIONAL
+ }
+
+ -- This document defines no extensions
+ CVResponseExtensions EXTENSION ::= {...}
+
+ ResponseStatus ::= SEQUENCE {
+ statusCode CVStatusCode DEFAULT okay,
+ errorMessage UTF8String OPTIONAL
+ }
+
+ CVStatusCode ::= ENUMERATED {
+ okay (0),
+ skipUnrecognizedItems (1),
+ tooBusy (10),
+ invalidRequest (11),
+ internalError (12),
+ badStructure (20),
+ unsupportedVersion (21),
+ abortUnrecognizedItems (22),
+ unrecognizedSigKey (23),
+ badSignatureOrMAC (24),
+ unableToDecode (25),
+ notAuthorized (26),
+ unsupportedChecks (27),
+ unsupportedWantBacks (28),
+ unsupportedSignatureOrMAC (29),
+ invalidSignatureOrMAC (30),
+ protectedResponseUnsupported (31),
+ unrecognizedResponderName (32),
+ relayingLoop (40),
+ unrecognizedValPol (50),
+ unrecognizedValAlg (51),
+ fullRequestInResponseUnsupported (52),
+ fullPolResponseUnsupported (53),
+ inhibitPolicyMappingUnsupported (54),
+ requireExplicitPolicyUnsupported (55),
+ inhibitAnyPolicyUnsupported (56),
+ validationTimeUnsupported (57),
+ unrecognizedCritQueryExt (63),
+ unrecognizedCritRequestExt (64),
+ ...
+ }
+
+ RespValidationPolicy ::= ValidationPolicy
+
+ RequestReference ::= CHOICE {
+ requestHash [0] HashValue, -- hash of CVRequest
+ fullRequest [1] CVRequest }
+
+ HashValue ::= SEQUENCE {
+ algorithm HashAlgorithm
+ DEFAULT { algorithm mda-sha1.&id },
+ value OCTET STRING }
+
+ ReplyObjects ::= SEQUENCE SIZE (1..MAX) OF CertReply
+
+ CertReply ::= SEQUENCE {
+ cert CertReference,
+ replyStatus ReplyStatus DEFAULT success,
+ replyValTime GeneralizedTime,
+ replyChecks ReplyChecks,
+ replyWantBacks ReplyWantBacks,
+ validationErrors [0] SEQUENCE SIZE (1..MAX) OF
+ OBJECT IDENTIFIER ( BasicValidationErrorSet |
+ NameValidationErrorSet,
+ ... ) OPTIONAL,
+ nextUpdate [1] GeneralizedTime OPTIONAL,
+ certReplyExtensions [2] Extensions{{...}} OPTIONAL
+ }
+
+ ReplyStatus ::= ENUMERATED {
+ success (0),
+ malformedPKC (1),
+ malformedAC (2),
+ unavailableValidationTime (3),
+ referenceCertHashFail (4),
+ certPathConstructFail (5),
+ certPathNotValid (6),
+ certPathNotValidNow (7),
+ wantBackUnsatisfied (8)
+ }
+ ReplyChecks ::= SEQUENCE OF ReplyCheck
+
+ ReplyCheck ::= SEQUENCE {
+ check OBJECT IDENTIFIER (CertCheckSet | ACertCheckSet, ... ),
+ status INTEGER DEFAULT 0
+ }
+
+ ReplyWantBacks ::= SEQUENCE OF ReplyWantBack
+
+ ReplyWantBack::= SEQUENCE {
+ wb WANT-BACK.&id({AllWantBacks}),
+ value OCTET STRING
+ (CONTAINING WANT-BACK.&Type({AllWantBacks}{@wb}))
+ }
+
+ WANT-BACK ::= TYPE-IDENTIFIER
+
+ AllWantBacks WANT-BACK ::= {
+ WantBackSet | ACertWantBackSet | AnyWantBackSet, ...
+ }
+
+ CertBundles ::= SEQUENCE SIZE (1..MAX) OF CertBundle
+
+ RevInfoWantBack ::= SEQUENCE {
+ revocationInfo RevocationInfos,
+ extraCerts CertBundle OPTIONAL
+ }
+
+ SCVPResponses ::= SEQUENCE OF ContentInfo
+
+ -- SCVP Validation Policies Request
+
+ ct-scvp-valPolRequest CONTENT-TYPE ::=
+ { ValPolRequest IDENTIFIED BY id-ct-scvp-valPolRequest }
+
+ id-ct-scvp-valPolRequest OBJECT IDENTIFIER ::= { id-ct 12 }
+
+ ValPolRequest ::= SEQUENCE {
+ vpRequestVersion INTEGER DEFAULT 1,
+ requestNonce OCTET STRING
+ }
+
+ -- SCVP Validation Policies Response
+
+ ct-scvp-valPolResponse CONTENT-TYPE ::=
+ { ValPolResponse IDENTIFIED BY id-ct-scvp-valPolResponse }
+
+ id-ct-scvp-valPolResponse OBJECT IDENTIFIER ::= { id-ct 13 }
+ ValPolResponse ::= SEQUENCE {
+ vpResponseVersion INTEGER,
+ maxCVRequestVersion INTEGER,
+ maxVPRequestVersion INTEGER,
+ serverConfigurationID INTEGER,
+ thisUpdate GeneralizedTime,
+ nextUpdate GeneralizedTime OPTIONAL,
+ supportedChecks CertChecks,
+ supportedWantBacks WantBack,
+ validationPolicies SEQUENCE OF OBJECT IDENTIFIER,
+ validationAlgs SEQUENCE OF OBJECT IDENTIFIER,
+ authPolicies SEQUENCE OF AuthPolicy,
+ responseTypes ResponseTypes,
+ defaultPolicyValues RespValidationPolicy,
+ revocationInfoTypes RevocationInfoTypes,
+ signatureGeneration SEQUENCE OF AlgorithmIdentifier
+ {SIGNATURE-ALGORITHM,
+ {SignatureAlgorithms}},
+ signatureVerification SEQUENCE OF AlgorithmIdentifier
+ {SIGNATURE-ALGORITHM,
+ {SignatureAlgorithms}},
+ hashAlgorithms SEQUENCE SIZE (1..MAX) OF
+ OBJECT IDENTIFIER,
+ serverPublicKeys SEQUENCE OF KeyAgreePublicKey
+ OPTIONAL,
+ clockSkew INTEGER DEFAULT 10,
+ requestNonce OCTET STRING OPTIONAL
+ }
+
+ ResponseTypes ::= ENUMERATED {
+ cached-only (0),
+ non-cached-only (1),
+ cached-and-non-cached (2)
+ }
+
+ RevocationInfoTypes ::= BIT STRING {
+ fullCRLs (0),
+ deltaCRLs (1),
+ indirectCRLs (2),
+ oCSPResponses (3)
+ }
+
+ AuthPolicy ::= OBJECT IDENTIFIER
+
+ -- SCVP Check Identifiers
+
+ id-stc OBJECT IDENTIFIER ::=
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) 17 }
+
+ CertCheckSet OBJECT IDENTIFIER ::= {
+ id-stc-build-pkc-path | id-stc-build-valid-pkc-path |
+ id-stc-build-status-checked-pkc-path, ... }
+
+ id-stc-build-pkc-path OBJECT IDENTIFIER ::= { id-stc 1 }
+ id-stc-build-valid-pkc-path OBJECT IDENTIFIER ::= { id-stc 2 }
+ id-stc-build-status-checked-pkc-path
+ OBJECT IDENTIFIER ::= { id-stc 3 }
+
+ ACertCheckSet OBJECT IDENTIFIER ::= {
+ id-stc-build-aa-path | id-stc-build-valid-aa-path |
+ id-stc-build-status-checked-aa-path |
+ id-stc-status-check-ac-and-build-status-checked-aa-path
+ }
+
+ id-stc-build-aa-path OBJECT IDENTIFIER ::= { id-stc 4 }
+ id-stc-build-valid-aa-path OBJECT IDENTIFIER ::= { id-stc 5 }
+ id-stc-build-status-checked-aa-path
+ OBJECT IDENTIFIER ::= { id-stc 6 }
+ id-stc-status-check-ac-and-build-status-checked-aa-path
+ OBJECT IDENTIFIER ::= { id-stc 7 }
+
+ -- SCVP WantBack Identifiers
+
+ id-swb OBJECT IDENTIFIER ::=
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) 18 }
+
+ WantBackSet WANT-BACK ::= {
+ swb-pkc-cert | swb-pkc-best-cert-path |
+ swb-pkc-revocation-info | swb-pkc-public-key-info |
+ swb-pkc-all-cert-paths | swb-pkc-ee-revocation-info |
+ swb-pkc-CAs-revocation-info
+ }
+
+ ACertWantBackSet WANT-BACK ::= {
+ swb-ac-cert | swb-aa-cert-path |
+ swb-aa-revocation-info | swb-ac-revocation-info
+ }
+
+ AnyWantBackSet WANT-BACK ::= { swb-relayed-responses }
+
+ swb-pkc-best-cert-path WANT-BACK ::=
+ { CertBundle IDENTIFIED BY id-swb-pkc-best-cert-path }
+ id-swb-pkc-best-cert-path OBJECT IDENTIFIER ::= { id-swb 1 }
+ swb-pkc-revocation-info WANT-BACK ::=
+ { RevInfoWantBack IDENTIFIED BY id-swb-pkc-revocation-info }
+ id-swb-pkc-revocation-info OBJECT IDENTIFIER ::= { id-swb 2 }
+
+ swb-pkc-public-key-info WANT-BACK ::=
+ { SubjectPublicKeyInfo IDENTIFIED BY id-swb-pkc-public-key-info }
+ id-swb-pkc-public-key-info OBJECT IDENTIFIER ::= { id-swb 4 }
+
+ swb-aa-cert-path WANT-BACK ::=
+ {CertBundle IDENTIFIED BY id-swb-aa-cert-path }
+ id-swb-aa-cert-path OBJECT IDENTIFIER ::= { id-swb 5 }
+
+ swb-aa-revocation-info WANT-BACK ::=
+ { RevInfoWantBack IDENTIFIED BY id-swb-aa-revocation-info }
+ id-swb-aa-revocation-info OBJECT IDENTIFIER ::= { id-swb 6 }
+
+ swb-ac-revocation-info WANT-BACK ::=
+ { RevInfoWantBack IDENTIFIED BY id-swb-ac-revocation-info }
+ id-swb-ac-revocation-info OBJECT IDENTIFIER ::= { id-swb 7 }
+
+ swb-relayed-responses WANT-BACK ::=
+ {SCVPResponses IDENTIFIED BY id-swb-relayed-responses }
+
+ id-swb-relayed-responses OBJECT IDENTIFIER ::= { id-swb 9 }
+
+ swb-pkc-all-cert-paths WANT-BACK ::=
+ {CertBundles IDENTIFIED BY id-swb-pkc-all-cert-paths }
+ id-swb-pkc-all-cert-paths OBJECT IDENTIFIER ::= { id-swb 12}
+
+ swb-pkc-ee-revocation-info WANT-BACK ::=
+ { RevInfoWantBack IDENTIFIED BY id-swb-pkc-ee-revocation-info }
+ id-swb-pkc-ee-revocation-info OBJECT IDENTIFIER ::= { id-swb 13}
+
+ swb-pkc-CAs-revocation-info WANT-BACK ::=
+ { RevInfoWantBack IDENTIFIED BY id-swb-pkc-CAs-revocation-info }
+ id-swb-pkc-CAs-revocation-info OBJECT IDENTIFIER ::= { id-swb 14}
+
+ swb-pkc-cert WANT-BACK ::=
+ { Certificate IDENTIFIED BY id-swb-pkc-cert }
+ id-swb-pkc-cert OBJECT IDENTIFIER ::= { id-swb 10}
+
+ swb-ac-cert WANT-BACK ::=
+ { AttributeCertificate IDENTIFIED BY id-swb-ac-cert }
+ id-swb-ac-cert OBJECT IDENTIFIER ::= { id-swb 11}
+
+ -- SCVP Validation Policy and Algorithm Identifiers
+
+ id-svp OBJECT IDENTIFIER ::=
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) 19 }
+
+ svp-defaultValPolicy POLICY ::=
+ { IDENTIFIED BY id-svp-defaultValPolicy }
+
+ id-svp-defaultValPolicy OBJECT IDENTIFIER ::= { id-svp 1 }
+
+ -- SCVP Basic Validation Algorithm Identifier
+
+ svp-basicValAlg POLICY ::= {IDENTIFIED BY id-svp-basicValAlg }
+
+ id-svp-basicValAlg OBJECT IDENTIFIER ::= { id-svp 3 }
+
+ -- SCVP Basic Validation Algorithm Errors
+
+ id-bvae OBJECT IDENTIFIER ::= id-svp-basicValAlg
+
+ BasicValidationErrorSet OBJECT IDENTIFIER ::= {
+ id-bvae-expired | id-bvae-not-yet-valid |
+ id-bvae-wrongTrustAnchor | id-bvae-noValidCertPath |
+ id-bvae-revoked | id-bvae-invalidKeyPurpose |
+ id-bvae-invalidKeyUsage | id-bvae-invalidCertPolicy
+ }
+
+ id-bvae-expired OBJECT IDENTIFIER ::= { id-bvae 1 }
+ id-bvae-not-yet-valid OBJECT IDENTIFIER ::= { id-bvae 2 }
+ id-bvae-wrongTrustAnchor OBJECT IDENTIFIER ::= { id-bvae 3 }
+ id-bvae-noValidCertPath OBJECT IDENTIFIER ::= { id-bvae 4 }
+ id-bvae-revoked OBJECT IDENTIFIER ::= { id-bvae 5 }
+ id-bvae-invalidKeyPurpose OBJECT IDENTIFIER ::= { id-bvae 9 }
+ id-bvae-invalidKeyUsage OBJECT IDENTIFIER ::= { id-bvae 10 }
+ id-bvae-invalidCertPolicy OBJECT IDENTIFIER ::= { id-bvae 11 }
+
+ -- SCVP Name Validation Algorithm Identifier
+
+ svp-nameValAlg POLICY ::=
+ {TYPE NameValidationAlgParams IDENTIFIED BY id-svp-nameValAlg }
+
+ id-svp-nameValAlg OBJECT IDENTIFIER ::= { id-svp 2 }
+
+ -- SCVP Name Validation Algorithm DN comparison algorithm
+
+ NameCompAlgSet OBJECT IDENTIFIER ::= {
+ id-nva-dnCompAlg
+ }
+
+ id-nva-dnCompAlg OBJECT IDENTIFIER ::= { id-svp 4 }
+ -- SCVP Name Validation Algorithm Errors
+
+ id-nvae OBJECT IDENTIFIER ::= id-svp-nameValAlg
+
+ NameValidationErrorSet OBJECT IDENTIFIER ::= {
+ id-nvae-name-mismatch | id-nvae-no-name | id-nvae-unknown-alg |
+ id-nvae-bad-name | id-nvae-bad-name-type | id-nvae-mixed-names
+ }
+
+ id-nvae-name-mismatch OBJECT IDENTIFIER ::= { id-nvae 1 }
+ id-nvae-no-name OBJECT IDENTIFIER ::= { id-nvae 2 }
+ id-nvae-unknown-alg OBJECT IDENTIFIER ::= { id-nvae 3 }
+ id-nvae-bad-name OBJECT IDENTIFIER ::= { id-nvae 4 }
+ id-nvae-bad-name-type OBJECT IDENTIFIER ::= { id-nvae 5 }
+ id-nvae-mixed-names OBJECT IDENTIFIER ::= { id-nvae 6 }
+
+ -- SCVP Extended Key Usage Key Purpose Identifiers
+
+ id-kp OBJECT IDENTIFIER ::=
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) 3 }
+
+ SvcpExtKeyUsageSet OBJECT IDENTIFIER ::= {
+ id-kp-scvpServer | id-kp-scvpClient
+ }
+
+ id-kp-scvpServer OBJECT IDENTIFIER ::= { id-kp 15 }
+
+ id-kp-scvpClient OBJECT IDENTIFIER ::= { id-kp 16 }
+
+ END
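As a minimal sketch of how one of the new RFC modules can be exercised outside the
test suite: assuming the working directory is the new rfcs data directory (so that
the imported PKIX-*, OCSP and CMS modules are found via the include path), the file
can be fed to the public asn1ct compiler; the option list actually used by the test
suite is not shown in this diff.

    1> asn1ct:compile("SCVP-2009", [ber, {i, "."}]).
    ok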
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/SecureMimeMessageV3dot1-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/SecureMimeMessageV3dot1-2009.asn1
new file mode 100644
index 0000000000..2bd2aaa435
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/SecureMimeMessageV3dot1-2009.asn1
@@ -0,0 +1,122 @@
+ SecureMimeMessageV3dot1-2009
+ {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-msg-v3dot1-02(39)}
+ DEFINITIONS IMPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ SMIME-CAPS, SMIMECapabilities{}
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ ATTRIBUTE
+ FROM PKIX-CommonTypes-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57)}
+
+ SubjectKeyIdentifier, IssuerAndSerialNumber, RecipientKeyIdentifier
+ FROM CryptographicMessageSyntax-2009
+ {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-cms-2004-02(41)}
+
+ rc2-cbc, SMimeCaps
+ FROM CryptographicMessageSyntaxAlgorithms-2009
+ {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-cmsalg-2001-02(37)}
+
+ SMimeCaps
+ FROM PKIXAlgs-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-algorithms2008-02(56)}
+
+ SMimeCaps
+ FROM PKIX1-PSS-OAEP-Algorithms-2009
+ {iso(1) identified-organization(3) dod(6) internet(1)
+ security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-rsa-pkalgs-02(54)};
+
+ SMimeAttributeSet ATTRIBUTE ::=
+ { aa-smimeCapabilities | aa-encrypKeyPref, ... }
+
+ -- id-aa is the arc with all new authenticated and unauthenticated
+ -- attributes produced by the S/MIME Working Group
+
+ id-aa OBJECT IDENTIFIER ::=
+ { iso(1) member-body(2) usa(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) attributes(2)}
+
+ -- The S/MIME Capabilities attribute provides a method of broadcasting
+ -- the symmetric capabilities understood. Algorithms SHOULD be ordered
+ -- by preference and grouped by type
+
+ aa-smimeCapabilities ATTRIBUTE ::=
+ { TYPE SMIMECapabilities{{SMimeCapsSet}} IDENTIFIED BY
+ smimeCapabilities }
+ smimeCapabilities OBJECT IDENTIFIER ::=
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ 15 }
+
+ SMimeCapsSet SMIME-CAPS ::=
+ { cap-preferBinaryInside | cap-RC2CBC |
+ PKIXAlgs-2009.SMimeCaps |
+ CryptographicMessageSyntaxAlgorithms-2009.SMimeCaps |
+ PKIX1-PSS-OAEP-Algorithms-2009.SMimeCaps, ... }
+
+ -- Encryption Key Preference provides a method of broadcasting the
+ -- preferred encryption certificate.
+
+ aa-encrypKeyPref ATTRIBUTE ::=
+ { TYPE SMIMEEncryptionKeyPreference
+ IDENTIFIED BY id-aa-encrypKeyPref }
+
+ id-aa-encrypKeyPref OBJECT IDENTIFIER ::= {id-aa 11}
+
+ SMIMEEncryptionKeyPreference ::= CHOICE {
+ issuerAndSerialNumber [0] IssuerAndSerialNumber,
+ receipentKeyId [1] RecipientKeyIdentifier,
+ subjectAltKeyIdentifier [2] SubjectKeyIdentifier
+ }
+
+ -- receipentKeyId is spelt incorrectly, but kept for historical
+ -- reasons.
+
+ id-smime OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs9(9) 16 }
+
+ id-cap OBJECT IDENTIFIER ::= { id-smime 11 }
+
+ -- The preferBinaryInside indicates an ability to receive messages
+ -- with binary encoding inside the CMS wrapper
+
+ cap-preferBinaryInside SMIME-CAPS ::=
+ { -- No value -- IDENTIFIED BY id-cap-preferBinaryInside }
+
+ id-cap-preferBinaryInside OBJECT IDENTIFIER ::= { id-cap 1 }
+
+ -- The following list OIDs to be used with S/MIME V3
+
+ -- Signature Algorithms Not Found in [RFC3370]
+ --
+ -- md2WithRSAEncryption OBJECT IDENTIFIER ::=
+ -- {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-1(1)
+ -- 2}
+ --
+ -- Other Signed Attributes
+ --
+ -- signingTime OBJECT IDENTIFIER ::=
+ -- {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ -- 5}
+ -- See [RFC5652] for a description of how to encode the attribute
+ -- value.
+
+ cap-RC2CBC SMIME-CAPS ::=
+ { TYPE SMIMECapabilitiesParametersForRC2CBC
+ IDENTIFIED BY rc2-cbc}
+
+ SMIMECapabilitiesParametersForRC2CBC ::= INTEGER (40 | 128, ...)
+ -- (RC2 Key Length (number of bits))
+
+ END
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/SelectedAttributeTypes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/SelectedAttributeTypes.asn1
index 07bba30690..07bba30690 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/SelectedAttributeTypes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/SelectedAttributeTypes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/SeseAPDUs.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/SeseAPDUs.asn1
index 2917122e94..2917122e94 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/SeseAPDUs.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/SeseAPDUs.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/SpkmGssTokens.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/SpkmGssTokens.asn1
index 02205bd64c..02205bd64c 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/SpkmGssTokens.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/SpkmGssTokens.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Style-Descriptors.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Style-Descriptors.asn1
index 8f033eab6f..8f033eab6f 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Style-Descriptors.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Style-Descriptors.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Subprofiles.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Subprofiles.asn1
index bfcd0b5dbc..bfcd0b5dbc 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Subprofiles.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Subprofiles.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Temporal-Relationships.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Temporal-Relationships.asn1
index 9633995e3b..9633995e3b 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Temporal-Relationships.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Temporal-Relationships.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Text-Units.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Text-Units.asn1
index ccc64a52f5..ccc64a52f5 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Text-Units.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Text-Units.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/UpperBounds.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/UpperBounds.asn1
index c97c83a569..c97c83a569 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/UpperBounds.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/UpperBounds.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/UsefulDefinitions.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/UsefulDefinitions.asn1
index d9601bb7d0..d9601bb7d0 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/UsefulDefinitions.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/UsefulDefinitions.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Videotex-Coding-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Videotex-Coding-Attributes.asn1
index 18e51cbc0d..18e51cbc0d 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Videotex-Coding-Attributes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Videotex-Coding-Attributes.asn1
diff --git a/lib/asn1/test/asn1_test_lib.erl b/lib/asn1/test/asn1_test_lib.erl
index da07cd1118..a5f46046ff 100644
--- a/lib/asn1/test/asn1_test_lib.erl
+++ b/lib/asn1/test/asn1_test_lib.erl
@@ -21,6 +21,7 @@
-export([compile/3,compile_all/3,compile_erlang/3,
hex_to_bin/1,
+ match_value/2,
parallel/0,
roundtrip/3,roundtrip/4,roundtrip_enc/3,roundtrip_enc/4]).
@@ -106,6 +107,24 @@ compile_erlang(Mod, Config, Options) ->
hex_to_bin(S) ->
<< <<(hex2num(C)):4>> || C <- S, C =/= $\s >>.
+%% match_value(Pattern, Value) -> ok.
+%% Match Pattern against Value. If the Pattern contains '_' in any
+%% position, the corresponding position in the Value can be
+%% anything. Generate an exception if the Pattern and Value don't
+%% match.
+
+match_value('_', _) ->
+ ok;
+match_value([H1|T1], [H2|T2]) ->
+ match_value(H1, H2),
+ match_value(T1, T2);
+match_value(T1, T2) when tuple_size(T1) =:= tuple_size(T2) ->
+ match_value_tuple(1, T1, T2);
+match_value(Same, Same) ->
+ ok;
+match_value(V1, V2) ->
+ error({nomatch,V1,V2}).
+
roundtrip(Mod, Type, Value) ->
roundtrip(Mod, Type, Value, Value).
@@ -132,6 +151,12 @@ hex2num(C) when $0 =< C, C =< $9 -> C - $0;
hex2num(C) when $A =< C, C =< $F -> C - $A + 10;
hex2num(C) when $a =< C, C =< $f -> C - $a + 10.
+match_value_tuple(I, T1, T2) when I =< tuple_size(T1) ->
+ match_value(element(I, T1), element(I, T2)),
+ match_value_tuple(I+1, T1, T2);
+match_value_tuple(_, _, _) ->
+ ok.
+
test_ber_indefinite(Mod, Type, Encoded, ExpectedValue) ->
case Mod:encoding_rule() of
ber ->
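The match_value/2 helper added and exported above is a structural matcher: the atom
'_' is a wildcard at any depth, lists and tuples are compared element by element,
any other terms must be equal, and a mismatch raises {nomatch,V1,V2}. A minimal
usage sketch with hypothetical values (not taken from the suite):

    match_value_example() ->
        %% '_' matches any term in that position.
        ok = asn1_test_lib:match_value({'Seq',1,'_'}, {'Seq',1,<<"anything">>}),
        ok = asn1_test_lib:match_value(['_',b], [a,b]),
        %% A mismatch raises an error exception carrying the offending terms.
        {'EXIT',{{nomatch,1,2},_}} =
            (catch asn1_test_lib:match_value({a,1}, {a,2})),
        ok.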
diff --git a/lib/asn1/test/error_SUITE.erl b/lib/asn1/test/error_SUITE.erl
index 1edd60f7c8..a9893b91cc 100644
--- a/lib/asn1/test/error_SUITE.erl
+++ b/lib/asn1/test/error_SUITE.erl
@@ -19,9 +19,12 @@
-module(error_SUITE).
-export([suite/0,all/0,groups/0,
- already_defined/1,bitstrings/1,enumerated/1,
- imports/1,instance_of/1,integers/1,objects/1,
- parameterization/1,values/1]).
+ already_defined/1,bitstrings/1,
+ classes/1,constraints/1,constructed/1,enumerated/1,
+ imports_exports/1,instance_of/1,integers/1,objects/1,
+ object_field_extraction/1,oids/1,rel_oids/1,
+ object_sets/1,parameterization/1,
+ syntax/1,table_constraints/1,tags/1,values/1]).
-include_lib("test_server/include/test_server.hrl").
@@ -34,12 +37,22 @@ groups() ->
[{p,parallel(),
[already_defined,
bitstrings,
+ classes,
+ constraints,
+ constructed,
enumerated,
- imports,
+ imports_exports,
instance_of,
integers,
objects,
+ object_field_extraction,
+ object_sets,
+ oids,
+ rel_oids,
parameterization,
+ syntax,
+ table_constraints,
+ tags,
values]}].
parallel() ->
@@ -94,6 +107,46 @@ bitstrings(Config) ->
]} = run(P, Config),
ok.
+classes(Config) ->
+ M = 'Classes',
+ P = {M,
+ <<"Classes DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
+ " LowerCase ::= CLASS { &id INTEGER UNIQUE }\n"
+ " CL ::= CLASS { &id INTEGER UNIQUE DEFAULT 42}\n"
+ "END\n">>},
+ {error,
+ [{structured_error,{M,2},asn1ct_check,
+ {illegal_class_name,'LowerCase'}},
+ {structured_error,{M,3},asn1ct_check,
+ {unique_and_default,id}}
+ ]} = run(P, Config),
+ ok.
+
+constraints(Config) ->
+ M = 'Constraints',
+ P = {M,
+ <<"Constraints DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
+ " II-1 ::= INTEGER (holder-1.&obj)\n"
+ " II-2 ::= INTEGER ('1234'H<..20)\n"
+ " II-3 ::= INTEGER (1..<\"abc\")\n"
+ " II-4 ::= INTEGER (10..1)\n"
+
+ " HOLDER ::= CLASS {\n"
+ " &obj HOLDER OPTIONAL\n"
+ " }\n"
+
+ " holder-1 HOLDER ::= { &obj holder-2 }\n"
+ " holder-2 HOLDER ::= { }\n"
+ "END\n">>},
+ {error,
+ [
+ {structured_error,{M,2},asn1ct_check,illegal_value},
+ {structured_error,{M,3},asn1ct_check,illegal_integer_value},
+ {structured_error,{M,4},asn1ct_check,illegal_integer_value},
+ {structured_error,{M,5},asn1ct_check,reversed_range}
+ ]} = run(P, Config),
+ ok.
+
enumerated(Config) ->
M = 'Enumerated',
P = {M,
@@ -111,38 +164,77 @@ enumerated(Config) ->
" S2 ::= SEQUENCE {\n"
" e2 EnumExt DEFAULT xyz\n"
" }\n"
+
+ " BadEnum1 ::= ENUMERATED {a, b, c, b }\n"
+ " BadEnum2 ::= ENUMERATED {a(1), b(2), b(3) }\n"
+ " BadEnum3 ::= ENUMERATED {a(1), b(1) }\n"
+ " BadEnum4 ::= ENUMERATED {a, b, ..., c(0) }\n"
+ " BadEnum5 ::= ENUMERATED {a, b, ..., c(10), d(5) }\n"
"END\n">>},
{error,
[
- {structured_error,{'Enumerated',3},asn1ct_check,{undefined,d}},
- {structured_error,{'Enumerated',5},asn1ct_check,{undefined,z}},
- {structured_error,{'Enumerated',10},asn1ct_check,{undefined,aa}},
- {structured_error,{'Enumerated',13},asn1ct_check,{undefined,xyz}}
+ {structured_error,{M,3},asn1ct_check,{undefined,d}},
+ {structured_error,{M,5},asn1ct_check,{undefined,z}},
+ {structured_error,{M,6},asn1ct_check,{undefined,aa}},
+ {structured_error,{M,12},asn1ct_check,{undefined,xyz}},
+ {structured_error,{M,15},asn1ct_check,
+ {enum_illegal_redefinition,b}},
+ {structured_error,{M,16},asn1ct_check,
+ {enum_illegal_redefinition,b}},
+ {structured_error,{M,17},asn1ct_check,
+ {enum_reused_value,b,1}},
+ {structured_error,{M,18},asn1ct_check,
+ {enum_reused_value,c,0}},
+ {structured_error,{M,19},asn1ct_check,
+ {enum_not_ascending,d,5,10}}
]
} = run(P, Config),
ok.
-imports(Config) ->
+imports_exports(Config) ->
Ext = 'ExternalModule',
ExtP = {Ext,
<<"ExternalModule DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
+ "IMPORTS\n"
+ " Int, NonExistingImport FROM ImportsFrom;\n"
+
+ " Existing ::= INTEGER\n"
"END\n">>},
- ok = run(ExtP, Config),
+ {error,
+ [{structured_error,
+ {Ext,3},
+ asn1ct_check,
+ {undefined_import,'NonExistingImport',
+ 'ImportsFrom'}}]} = run(ExtP, Config),
M = 'Imports',
P = {M,
<<"Imports DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
- "IMPORTS NotDefined FROM ExternalModule\n"
- "X FROM UndefinedModule objid\n"
- "Y, Z FROM UndefinedModule2;\n"
+ "EXPORTS\n"
+ " T, UndefinedType;\n"
+
+ "IMPORTS\n"
+ " NotDefined, Existing, Int, NonExistingImport\n"
+ " FROM ExternalModule\n"
+ " X FROM UndefinedModule objid\n"
+ " Y, Z FROM UndefinedModule2;\n"
+
"objid OBJECT IDENTIFIER ::= {joint-iso-ccitt(2) remote-operations(4)\n"
" notation(0)}\n"
+ "T ::= INTEGER\n"
"END\n">>},
- {error,[{structured_error,{M,2},asn1ct_check,
- {undefined_import,'NotDefined','ExternalModule'}},
- {structured_error,{M,3},asn1ct_check,{undefined_import,'X','UndefinedModule'}},
- {structured_error,{M,4},asn1ct_check,{undefined_import,'Y','UndefinedModule2'}},
- {structured_error,{M,4},asn1ct_check,{undefined_import,'Z','UndefinedModule2'}}
+ {error,[{structured_error,{M,3},asn1ct_check,
+ {undefined_export, 'UndefinedType'}},
+ {structured_error,{M,5},asn1ct_check,
+ {undefined_import,'NonExistingImport',Ext}},
+ {structured_error,{M,5},asn1ct_check,
+ {undefined_import,'NotDefined',Ext}},
+ {structured_error,{M,7},asn1ct_check,
+ {undefined_import,'X','UndefinedModule'}},
+ {structured_error,{M,8},asn1ct_check,
+ {undefined_import,'Y','UndefinedModule2'}},
+ {structured_error,{M,8},asn1ct_check,
+ {undefined_import,'Z','UndefinedModule2'}}
]} = run(P, Config),
ok.
@@ -170,11 +262,14 @@ integers(Config) ->
" Int1 ::= INTEGER {a(1), a(1)}\n"
" Int2 ::= INTEGER {a(1), b(2), a(3)}\n"
" Int3 ::= INTEGER {x(1), y(1)}\n"
+ " i0 INTEGER ::= 1\n"
+ " Int4 ::= INTEGER {x(i0), y(undef) }\n"
"END\n">>},
{error,
[{structured_error,{M,2},asn1ct_check,{namelist_redefinition,a}},
{structured_error,{M,3},asn1ct_check,{namelist_redefinition,a}},
- {structured_error,{M,4},asn1ct_check,{value_reused,1}}
+ {structured_error,{M,4},asn1ct_check,{value_reused,1}},
+ {structured_error,{M,6},asn1ct_check,{undefined,undef}}
]} = run(P, Config),
ok.
@@ -188,6 +283,11 @@ objects(Config) ->
" obj3 CL ::= { &Data OCTET STRING }\n"
" obj4 SMALL ::= { &code 42 }\n"
" InvalidSet CL ::= { obj1 }\n"
+ " obj5 CL ::= {}\n"
+ " ErrSet ::= PT{ {PT{inst}}}\n"
+ " obj6 CL ::= 7\n"
+ " obj7 CL ::= int\n"
+ " obj8 NON-CLASS ::= { &id 1 }\n"
" CL ::= CLASS {\n"
" &code INTEGER UNIQUE,\n"
@@ -203,6 +303,12 @@ objects(Config) ->
" &code INTEGER UNIQUE,\n"
" &i INTEGER\n"
" }\n"
+
+ " PT{SMALL:Small} ::= SEQUENCE { a SMALL.&code ({Small}) }\n"
+ " inst SMALL ::= {&code 42, &i 4711}\n"
+
+ " int INTEGER ::= 42\n"
+ " NON-CLASS ::= SEQUENCE { a BOOLEAN }\n"
"END\n">>},
{error,
[
@@ -216,24 +322,490 @@ objects(Config) ->
{structured_error,{M,5},asn1ct_check,
{missing_mandatory_fields,[i],obj4}},
{structured_error,{M,6},asn1ct_check,
- {invalid_fields,[wrong],'InvalidSet'}}
+ {invalid_fields,[wrong],'InvalidSet'}},
+ {structured_error,{M,7},asn1ct_check,
+ {missing_mandatory_fields,
+ ['Data','Set','VarTypeValue',code,enum,object,
+ vartypevalue],obj5}},
+ {structured_error,{M,8},asn1ct_check,invalid_objectset},
+ {structured_error,{M,9},asn1ct_check,illegal_object},
+ {structured_error,{M,10},asn1ct_check,illegal_object},
+ {structured_error,{M,11},asn1ct_check,illegal_object}
+ ]
+ } = run(P, Config),
+ ok.
+
+object_field_extraction(Config) ->
+ M = 'ObjectFieldExtraction',
+ P = {M,
+ <<"ObjectFieldExtraction DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
+
+ " DataObjSet DATA-CLASS ::= {\n"
+ " holder-object-1.&int,\n"
+ " ...\n"
+ " }\n"
+
+ " DataObjSetNoExt DATA-CLASS ::= {\n"
+ " holder-object-1.&int\n"
+ " }\n"
+
+ " holder-object-1 HOLDER-CLASS ::= {\n"
+ " &int 42\n"
+ " }\n"
+
+ " HOLDER-CLASS ::= CLASS {\n"
+ " &int INTEGER\n"
+ " }\n"
+
+ " DATA-CLASS ::= CLASS {\n"
+ " &id INTEGER\n"
+ " }\n"
+
+ "END\n">>},
+ {error,
+ [
+ {structured_error,{M,2},asn1ct_check,illegal_object},
+ {structured_error,{M,6},asn1ct_check,illegal_object}
+ ]
+ } = run(P, Config),
+ ok.
+
+object_sets(Config) ->
+ M = 'ObjectSets',
+ P = {M, <<"ObjectSets DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
+ "TEST-UNIQ ::= CLASS { &id INTEGER UNIQUE, &test INTEGER }\n"
+ "UniqSet TEST-UNIQ ::= { { &id 1, &test 1 } | {&id 1, &test 2} }\n"
+
+ "DOUBLE-UNIQ ::= CLASS { &id1 INTEGER UNIQUE,"
+ " &id INTEGER UNIQUE }\n"
+ "DoubleSet DOUBLE-UNIQ ::= { {&id1 1, &id2 2} }\n"
+ "END\n">>},
+ {error,
+ [{structured_error,{M,3},asn1ct_check,{non_unique_object,1}},
+ {structured_error,{M,5},asn1ct_check,multiple_uniqs}
+ ]
+ } = run(P, Config),
+ ok.
+
+oids(Config) ->
+ M = 'OIDS',
+ P = {M,<<"OIDS DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
+ "CONTAINER ::= CLASS { &id OBJECT IDENTIFIER UNIQUE,\n"
+ " &int INTEGER OPTIONAL,\n"
+ " &seq SEQUENCE { a INTEGER } OPTIONAL\n"
+ "}\n"
+
+ "-- This is line 6.\n"
+ "object-1 CONTAINER ::= { &id {1 2 3}, &int 42 }\n"
+ "object-2 CONTAINER ::= { &id {1 999}, &int 0 }\n"
+ "object-3 CONTAINER ::= { &id {1 2}, &seq { a 42 } }\n"
+ "oid-1 OBJECT IDENTIFIER ::= object-1.&int\n"
+ "oid-2 OBJECT IDENTIFIER ::= object-2.&id\n"
+ "oid-3 OBJECT IDENTIFIER ::= object-3.&seq\n"
+ "-- This is line 13.\n"
+
+ "oid-5 OBJECT IDENTIFIER ::= { a 42, b 19 }\n"
+
+ "oid-6 OBJECT IDENTIFIER ::= int\n"
+ "int INTEGER ::= 42\n"
+
+ "oid-7 OBJECT IDENTIFIER ::= seq\n"
+ "seq SEQUENCE { x INTEGER } ::= { x 11 }\n"
+
+ "oid-8 OBJECT IDENTIFIER ::= os\n"
+ "os OCTET STRING ::= '1234'H\n"
+
+ "oid-9 OBJECT IDENTIFIER ::= { 1 os }\n"
+
+ "oid-10 OBJECT IDENTIFIER ::= { 1 invalid }\n"
+
+ "-- This is line 23.\n"
+ "oid-11 OBJECT IDENTIFIER ::= { 0 legal-oid }\n"
+ "legal-oid OBJECT IDENTIFIER ::= {1 2 3}\n"
+
+ "bad-root-1 OBJECT IDENTIFIER ::= {99}\n"
+ "bad-root-2 OBJECT IDENTIFIER ::= {0 42}\n"
+
+ "oid-object-ref-1 OBJECT IDENTIFIER ::= object-1\n"
+ "oid-object-ref-2 OBJECT IDENTIFIER ::= { object-1 19 } \n"
+
+ "oid-int OBJECT IDENTIFIER ::= 42\n"
+ "oid-sequence OBJECT IDENTIFIER ::= {a 42, b 35}\n"
+
+ "END\n">>},
+ {error,
+ [
+ {structured_error,{M,8},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,10},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,11},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,12},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,14},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,15},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,17},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,19},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,21},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,22},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,24},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,26},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,27},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,28},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,29},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,30},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,31},asn1ct_check,{illegal_oid,o_id}}
]
} = run(P, Config),
ok.
+rel_oids(Config) ->
+ M = 'REL-OIDS',
+ P = {M,<<"REL-OIDS DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
+ "legal-oid OBJECT IDENTIFIER ::= {1 2}\n"
+ "legal-roid RELATIVE-OID ::= {1 2}\n"
+ "CONTAINER ::= CLASS { &oid OBJECT IDENTIFIER OPTIONAL,\n"
+ " &int INTEGER OPTIONAL,\n"
+ " &seq SEQUENCE { a INTEGER } OPTIONAL\n"
+ "}\n"
+ "object-1 CONTAINER ::= { &oid {1 2 3},\n"
+ " &int 42,\n",
+ " &seq {a 42}\n"
+ " }\n"
+
+ "wrong-type-rel-oid-1 RELATIVE-OID ::= legal-oid\n"
+ "wrong-type-rel-oid-2 RELATIVE-OID ::= object-1.&oid\n"
+ "wrong-type-rel-oid-3 RELATIVE-OID ::= object-1.&int\n"
+ "wrong-type-rel-oid-4 RELATIVE-OID ::= object-1.&seq\n"
+ "wrong-type-rel-oid-5 RELATIVE-OID ::= object-1.&undef\n"
+
+ "oid-bad-first OBJECT IDENTIFIER ::= {legal-roid 3}\n"
+ "END\n">>},
+ {error,
+ [
+ {structured_error,{M,12},asn1ct_check,{illegal_oid,rel_oid}},
+ {structured_error,{M,13},asn1ct_check,{illegal_oid,rel_oid}},
+ {structured_error,{M,14},asn1ct_check,{illegal_oid,rel_oid}},
+ {structured_error,{M,15},asn1ct_check,{illegal_oid,rel_oid}},
+ {structured_error,{M,16},asn1ct_check,{undefined_field,undef}},
+ {structured_error,{M,17},asn1ct_check,{illegal_oid,o_id}}
+ ]
+ } = run(P, Config),
+ ok.
+
+
parameterization(Config) ->
M = 'Parameterization',
P = {M,
<<"Parameterization DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
" NotUppercase{lowercase} ::= INTEGER (lowercase)\n"
+
+ " P{T1,T2} ::= SEQUENCE { a T1, b T2 }\n"
+ " S ::= P{OCTET STRING}\n"
+
+ " Seq ::= SEQUENCE { a INTEGER }\n"
+ " Sbad ::= Seq{INTEGER}\n"
+
+ "END\n">>},
+ {error,
+ [{structured_error,{M,2},asn1ct_check,
+ {illegal_typereference,lowercase}},
+ {structured_error,{M,4},asn1ct_check,
+ param_wrong_number_of_arguments},
+ {structured_error,{M,6},asn1ct_check,
+ {param_bad_type, 'Seq'}}
+ ]
+ } = run(P, Config),
+ ok.
+
+
+constructed(Config) ->
+ M = 'Const',
+ P = {M,
+ <<"Const DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
+ " Seq1 ::= SEQUENCE {a INTEGER, b BIT STRING, a BOOLEAN}\n"
+ " Ch ::= CHOICE {a INTEGER, b BIT STRING, a BOOLEAN}\n"
+ " Seq2 ::= SEQUENCE {COMPONENTS OF Ch}\n"
+ " CL ::= CLASS { &id INTEGER UNIQUE, &Type }\n"
+ " Seq3 ::= SEQUENCE { id CL.&id, d CL.&foo }\n"
+
+ " Seq4 ::= SEQUENCE { a INTEGER, z INTEGER OPTIONAL, b Set1 }\n"
+ " Set1 ::= SET { c BOOLEAN, d INTEGER }\n"
+ " s1 Seq4 ::= {a 42, b {c TRUE, zz 4711}}\n"
+ " s2 Seq4 ::= {a 42, b {c TRUE, d FALSE}}\n"
+ " s3 Seq4 ::= {a 42, b {c TRUE}}\n"
+ " s4 Seq4 ::= {a 42, b {c TRUE, d 4711}, zz 4712}\n"
+ " s5 Seq4 ::= {a 42}\n"
+ " s6 Seq4 ::= {a 42, zz 4712, b {c TRUE, d 4711}}\n"
"END\n">>},
{error,
- [{structured_error,{'Parameterization',2},asn1ct_check,
- {illegal_typereference,lowercase}}
- ]
- } = run(P, Config),
+ [{structured_error,{M,2},asn1ct_check,{duplicate_identifier,a}},
+ {structured_error,{M,3},asn1ct_check,{duplicate_identifier,a}},
+ {structured_error,{M,4},asn1ct_check,{illegal_COMPONENTS_OF,'Ch'}},
+ {structured_error,{M,6},asn1ct_check,{illegal_object_field,foo}},
+
+ {structured_error,{M,9},asn1ct_check,{illegal_id,zz}},
+ {structured_error,{M,10},asn1ct_check,illegal_integer_value},
+ {structured_error,{M,11},asn1ct_check,{missing_id,d}},
+ {structured_error,{M,12},asn1ct_check,{illegal_id,zz}},
+ {structured_error,{M,13},asn1ct_check,{missing_id,b}},
+ {structured_error,{M,14},asn1ct_check,{illegal_id,zz}}
+ ]
+ } = run(P, Config),
+ ok.
+
+syntax(Config) ->
+ M = 'Syntax',
+ P = {M,
+ <<"Syntax DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
+ " obj1 CL ::= { WRONG }\n"
+ " obj2 CL ::= { CODE 42 AGAIN WRONG }\n"
+ " obj3 CL ::= { INTEGER }\n"
+ " obj4 CL ::= { BIT STRING }\n"
+ " obj5 CL ::= { , }\n"
+ " obj6 CL ::= { CODE , }\n"
+ " obj7 CL ::= { CODE \"abc\" }\n"
+ " obj8 CL ::= { CODE }\n"
+ " obj9 CL ::= { CODE 42 ENUM}\n"
+ " obj10 CL ::= { CODE 42 ENUM BIT STRING}\n"
+
+ " obj11 CL ::= { CODE 42 TYPE 13}\n"
+ " obj12 CL ::= { CODE 42 TYPE d}\n"
+ " obj13 CL ::= { CODE 42 TYPE bs-value}\n"
+
+ " bad-syntax-1 BAD-SYNTAX-1 ::= { BAD 42 }\n"
+
+ " obj14 CL ::= { CODE 42 OBJ-SET integer }\n"
+ " obj15 CL ::= { CODE 42 OBJ-SET { A B } }\n"
+ " obj16 CL ::= { CODE 42 OBJ-SET SEQUENCE { an INTEGER } }\n"
+
+ " obj17 CL ::= { CODE 42 OID {seqtag 42} }\n"
+ " obj18 CL ::= { CODE 42 OID {seqtag 42, seqtag-again 43} }\n"
+ " obj19 CL ::= { CODE 42 OID {one 1 two 2} }\n"
+
+ " BAD-SYNTAX-1 ::= CLASS {\n"
+ " &code INTEGER UNIQUE\n"
+ " } WITH SYNTAX {\n"
+ " BAD &bad\n"
+ " }\n"
+
+ " BAD-SYNTAX-2 ::= CLASS {\n"
+ " &code INTEGER UNIQUE\n"
+ " } WITH SYNTAX {\n"
+ " BAD &Bad\n"
+ " }\n"
+
+ " BAD-SYNTAX-3 ::= CLASS {\n"
+ " &code INTEGER UNIQUE\n"
+ " } WITH SYNTAX {\n"
+ " [ID &code]\n"
+ " }\n"
+
+ " BAD-SYNTAX-4 ::= CLASS {\n"
+ " &code INTEGER UNIQUE\n"
+ " } WITH SYNTAX {\n"
+ " ID\n"
+ " }\n"
+
+ " BAD-SYNTAX-5 ::= CLASS {\n"
+ " &code INTEGER UNIQUE,\n"
+ " &Type\n"
+ " } WITH SYNTAX {\n"
+ " ID\n"
+ " }\n"
+
+ " BAD-SYNTAX-6 ::= CLASS {\n"
+ " &code INTEGER UNIQUE\n"
+ " } WITH SYNTAX {\n"
+ " ID &code, &code\n"
+ " }\n"
+
+ " BAD-SYNTAX-7 ::= CLASS {\n"
+ " &code INTEGER UNIQUE,\n"
+ " &Type\n"
+ " } WITH SYNTAX {\n"
+ " ID &Type, &code, &code, &Type\n"
+ " }\n"
+
+ " CL ::= CLASS {\n"
+ " &code INTEGER UNIQUE,\n"
+ " &enum ENUMERATED { a, b, c} OPTIONAL,\n"
+ " &Type OPTIONAL,\n"
+ " &ObjSet CL OPTIONAL,\n"
+ " &oid OBJECT IDENTIFIER OPTIONAL\n"
+ " } WITH SYNTAX {\n"
+ " CODE &code [ENUM &enum] [TYPE &Type] [OBJ-SET &ObjSet]\n"
+ " [OID &oid]\n"
+ " }\n"
+
+ " bs-value BIT STRING ::= '1011'B\n"
+
+ " integer INTEGER ::= 42\n"
+ "END\n">>},
+ {error,
+ [
+ {structured_error,{M,2},asn1ct_check,
+ {syntax_nomatch,"WRONG"}},
+ {structured_error,{M,3},asn1ct_check,
+ {syntax_nomatch,"AGAIN"}},
+ {structured_error,{M,4},asn1ct_check,
+ {syntax_nomatch,"INTEGER"}},
+ {structured_error,{M,5},asn1ct_check,
+ {syntax_nomatch,"BIT STRING"}},
+ {structured_error,{M,6},asn1ct_check,
+ {syntax_nomatch,"\",\""}},
+ {structured_error,{M,7},asn1ct_check,
+ {syntax_nomatch,"\",\""}},
+ {structured_error,{M,8},asn1ct_check,
+ {syntax_nomatch,"\"abc\""}},
+ {structured_error,{M,9},asn1ct_check,
+ syntax_nomatch},
+ {structured_error,{M,10},asn1ct_check,
+ syntax_nomatch},
+ {structured_error,{M,11},asn1ct_check,
+ {syntax_nomatch,"BIT STRING"}},
+ {structured_error,{M,12},asn1ct_check,
+ {syntax_nomatch,"13"}},
+ {structured_error,{M,13},asn1ct_check,
+ {syntax_nomatch,"d"}},
+ {structured_error,{M,14},asn1ct_check,
+ {syntax_nomatch,"bs-value"}},
+ {structured_error,{M,15},asn1ct_check,
+ {syntax_undefined_field,bad}},
+ {structured_error,{M,16},asn1ct_check,
+ {syntax_nomatch,"integer"}},
+ {structured_error,{M,17},asn1ct_check,
+ {syntax_nomatch,"\"A B\""}},
+ {structured_error,{M,18},asn1ct_check,
+ {syntax_nomatch,"SEQUENCE"}},
+ {structured_error,{M,19},asn1ct_check,
+ {syntax_nomatch,"\"seqtag 42\""}},
+ {structured_error,{M,20},asn1ct_check,
+ {syntax_nomatch,"\"seqtag 42 seqtag-again 43\""}},
+ {structured_error,{M,21},asn1ct_check,
+ {syntax_nomatch,"\"one 1 two 2\""}},
+ {structured_error,{M,22},asn1ct_check,
+ {syntax_undefined_field,bad}},
+ {structured_error,{M,27},asn1ct_check,
+ {syntax_undefined_field,'Bad'}},
+ {structured_error,{M,32},asn1ct_check,
+ {syntax_mandatory_in_optional_group,code}},
+ {structured_error,{M,37},asn1ct_check,
+ {syntax_missing_mandatory_fields,[code]}},
+ {structured_error,{M,42},asn1ct_check,
+ {syntax_missing_mandatory_fields,['Type',code]}},
+ {structured_error,{M,48},asn1ct_check,
+ {syntax_duplicated_fields,[code]}},
+ {structured_error,{M,53},asn1ct_check,
+ {syntax_duplicated_fields,['Type',code]}}
+ ]
+ } = run(P, Config),
+ ok.
+
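+%% Check error reporting for missing, illegal, and invalid table
+%% constraints on object class field references.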
+table_constraints(Config) ->
+ M = 'TableConstraints',
+ P = {M,
+ <<"TableConstraints DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
+ " Seq-1 ::= SEQUENCE {\n"
+ " contentType CONTENTS.&id,\n"
+ " content CONTENTS.&Type({Contents}{@contentType})\n"
+ " }\n"
+
+ " Seq-2 ::= SEQUENCE {\n"
+ " contentType INTEGER,\n"
+ " content CONTENTS.&Type({Contents}{@contentType})\n"
+ " }\n"
+
+ " Int ::= INTEGER ({1})\n"
+
+ " Seq-3 ::= SEQUENCE {\n"
+ " contentType CONTENTS.&id({1})\n"
+ " }\n"
+
+ "Contents CONTENTS ::= {\n"
+ " {OCTET STRING IDENTIFIED BY {2 1 1}}\n"
+ "}\n"
+
+ "CONTENTS ::= TYPE-IDENTIFIER\n"
+ "END\n">>},
+ {error,
+ [{structured_error,
+ {M,2},asn1ct_check,
+ {missing_table_constraint,contentType}},
+ {structured_error,
+ {M,6},asn1ct_check,
+ {missing_ocft,contentType}},
+ {structured_error,
+ {M,10},asn1ct_check,
+ illegal_table_constraint},
+ {structured_error,
+ {M,11},asn1ct_check,
+ invalid_table_constraint}
+ ]} = run(P, Config),
+ ok.
+
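+%% Check error reporting for duplicate tags in SEQUENCE/SET and for
+%% IMPLICIT tags applied to CHOICE and open types.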
+tags(Config) ->
+ M = 'Tags',
+ P = {M,
+ <<"Tags DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
+ "SeqOpt1 ::= SEQUENCE\n"
+ "{\n"
+ "bool1 BOOLEAN OPTIONAL,\n"
+ "int1 INTEGER,\n"
+ "seq1 SeqIn OPTIONAL\n"
+ "}\n"
+
+ "SeqOpt1Imp ::= SEQUENCE \n"
+ "{\n"
+ "bool1 [1] BOOLEAN OPTIONAL,\n"
+ "int1 INTEGER,\n"
+ "seq1 [2] SeqIn OPTIONAL,\n"
+ "seq2 [2] SeqIn OPTIONAL,\n"
+ "...,\n"
+ "int2 [3] SeqIn,\n"
+ "int3 [3] SeqIn\n"
+ "}\n"
+
+ "SeqIn ::= SEQUENCE \n"
+ "{\n"
+ "boolIn BOOLEAN,\n"
+ "intIn INTEGER\n"
+ "}\n"
+ "\n"
+
+
+ "Set1 ::= SET {\n"
+ " os [0] OCTET STRING,\n"
+ " bool [0] BOOLEAN\n"
+ "}\n"
+
+ "Seq1 ::= SEQUENCE {\n"
+ "a [0] IMPLICIT Choice OPTIONAL\n"
+ "}\n"
+ "Seq2 ::= SEQUENCE {\n"
+ "a [0] IMPLICIT ANY OPTIONAL\n"
+ "}\n"
+ "Choice ::=\n"
+ "CHOICE {\n"
+ "a [0] BOOLEAN,\n"
+ "b [1] INTEGER\n"
+ "}\n"
+
+ "END\n">>},
+ {error,
+ [{structured_error,
+ {M,8},asn1ct_check,
+ {duplicate_tags,[seq1,seq2]}},
+ {structured_error,
+ {M,24},asn1ct_check,
+ {duplicate_tags,[bool,os]}},
+ {structured_error,
+ {M,28},asn1ct_check,
+ {implicit_tag_before,choice}},
+ {structured_error,
+ {M,31},asn1ct_check,
+ {implicit_tag_before,open_type}}
+ ]} = run(P, Config),
ok.
+
values(Config) ->
M = 'Values',
P = {M,
@@ -241,6 +813,53 @@ values(Config) ->
" os1 OCTET STRING ::= \"abc\"\n"
" os2 OCTET STRING ::= 42\n"
" os3 OCTET STRING ::= { 1, 3 }\n"
+ " os4 OCTET STRING ::= '1234'H\n"
+ " Seq ::= SEQUENCE {\n"
+ " an OCTET STRING\n"
+ " }\n"
+ " seq Seq ::= { an int }\n"
+ " os5 OCTET STRING ::= holder-1.&str\n"
+ " os6 OCTET STRING ::= int\n"
+
+ " int1 INTEGER ::= \"string\"\n"
+ " int2 INTEGER ::= os4\n"
+ " int3 INTEGER ::= not-defined\n"
+ " int4 INTEGER ::= holder-1.&str\n"
+ " int5 INTEGER ::= holder-2.&obj\n"
+ " int6 INTEGER ::= holder-2.&undefined-field\n"
+ " int7 INTEGER ::= holder-2.&UndefinedField.&id\n"
+
+ " bs1 BIT STRING ::= 42\n"
+ " bs2 BIT STRING ::= {a,b}\n"
+ " bs3 BIT STRING {a(0),z(25)} ::= {a,b}\n"
+ " bs4 BIT STRING {a(0),z(25)} ::= int\n"
+ " bs5 BIT STRING ::= holder-2.&str\n"
+ " bs6 BIT STRING ::= holder-2.&obj\n"
+
+ " b1 BOOLEAN ::= 42\n"
+ " b2 BOOLEAN ::= {a,b}\n"
+
+ " HOLDER ::= CLASS {\n"
+ " &str IA5String,\n"
+ " &obj HOLDER OPTIONAL\n"
+ " }\n"
+
+ " holder-1 HOLDER ::= { &str \"xyz\" }\n"
+ " holder-2 HOLDER ::= { &str \"xyz\", &obj holder-1 }\n"
+
+ " ext-1 EXTERNAL ::= {identification bad:{1 2 3}, data-value '123'H}\n"
+ " ext-2 EXTERNAL ::= {identification syntax:{1 2 3}, data '123'H}\n"
+
+ " CH ::= CHOICE { a INTEGER, b BOOLEAN }\n"
+ " ch1 CH ::= 2344\n"
+ " ch2 CH ::= zz:34\n"
+
+ " st1 an < Seq ::= 42\n"
+ " st2 zz < CH ::= 42\n"
+ " st3 a < HOLDER ::= 42\n"
+ " st4 a < INTEGER ::= 42\n"
+
+ " int INTEGER ::= 42\n"
"END\n">>},
{error,
[
@@ -249,7 +868,59 @@ values(Config) ->
{structured_error,{M,3},asn1ct_check,
illegal_octet_string_value},
{structured_error,{M,4},asn1ct_check,
- illegal_octet_string_value}
+ illegal_octet_string_value},
+ {structured_error,{M,9},asn1ct_check,
+ illegal_octet_string_value},
+ {structured_error,{M,10},asn1ct_check,
+ illegal_octet_string_value},
+ {structured_error,{M,11},asn1ct_check,
+ illegal_octet_string_value},
+ {structured_error,{M,12},asn1ct_check,
+ illegal_integer_value},
+ {structured_error,{M,13},asn1ct_check,
+ illegal_integer_value},
+ {structured_error,{M,14},asn1ct_check,
+ illegal_integer_value},
+ {structured_error,{M,15},asn1ct_check,
+ illegal_integer_value},
+ {structured_error,{M,16},asn1ct_check,
+ illegal_integer_value},
+ {structured_error,{M,17},asn1ct_check,
+ {undefined_field,'undefined-field'}},
+ {structured_error,{M,18},asn1ct_check,
+ {undefined_field,'UndefinedField'}},
+ {structured_error,{M,19},asn1ct_check,
+ {illegal_value, "BIT STRING"}},
+ {structured_error,{M,20},asn1ct_check,
+ {illegal_value, "BIT STRING"}},
+ {structured_error,{M,21},asn1ct_check,
+ {illegal_value, "BIT STRING"}},
+ {structured_error,{M,22},asn1ct_check,
+ {illegal_value, "BIT STRING"}},
+ {structured_error,{M,23},asn1ct_check,
+ {illegal_value, "BIT STRING"}},
+ {structured_error,{M,24},asn1ct_check,
+ {illegal_value, "BIT STRING"}},
+ {structured_error,{M,25},asn1ct_check,
+ {illegal_value, "BOOLEAN"}},
+ {structured_error,{M,26},asn1ct_check,
+ {illegal_value, "BOOLEAN"}},
+ {structured_error,{M,33},asn1ct_check,
+ illegal_external_value},
+ {structured_error,{M,34},asn1ct_check,
+ illegal_external_value},
+ {structured_error,{M,36},asn1ct_check,
+ {illegal_id, 2344}},
+ {structured_error,{M,37},asn1ct_check,
+ {illegal_id, zz}},
+ {structured_error,{M,38},asn1ct_check,
+ {illegal_choice_type, 'Seq'}},
+ {structured_error,{M,39},asn1ct_check,
+ {illegal_id, zz}},
+ {structured_error,{M,40},asn1ct_check,
+ {illegal_choice_type, 'HOLDER'}},
+ {structured_error,{M,41},asn1ct_check,
+ {illegal_choice_type, 'INTEGER'}}
]
} = run(P, Config),
ok.
@@ -258,5 +929,7 @@ values(Config) ->
run({Mod,Spec}, Config) ->
Base = atom_to_list(Mod) ++ ".asn1",
File = filename:join(?config(priv_dir, Config), Base),
+ Include0 = filename:dirname(?config(data_dir, Config)),
+ Include = filename:join(filename:dirname(Include0), "asn1_SUITE_data"),
ok = file:write_file(File, Spec),
- asn1ct:compile(File).
+ asn1ct:compile(File, [{i, Include}]).
diff --git a/lib/asn1/test/syntax_SUITE.erl b/lib/asn1/test/syntax_SUITE.erl
new file mode 100644
index 0000000000..1a2c938fe5
--- /dev/null
+++ b/lib/asn1/test/syntax_SUITE.erl
@@ -0,0 +1,340 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2014. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+-module(syntax_SUITE).
+-export([suite/0,all/0,groups/0,
+ assignment/1,
+ class/1,
+ constraints/1,
+ exports/1,
+ header/1,
+ imports/1,
+ objects/1,
+ sequence/1,
+ syntax/1,
+ tokenizer/1,
+ types/1,
+ values/1]).
+
+-include_lib("test_server/include/test_server.hrl").
+
+suite() -> [{ct_hooks, [ts_install_cth]}].
+
+all() ->
+ [{group,p}].
+
+groups() ->
+ [{p,parallel(),
+ [assignment,
+ class,
+ constraints,
+ exports,
+ header,
+ imports,
+ objects,
+ sequence,
+ syntax,
+ tokenizer,
+ types,
+ values]}].
+
+parallel() ->
+ case erlang:system_info(schedulers) > 1 of
+ true -> [parallel];
+ false -> []
+ end.
+
+assignment(Config) ->
+ Head = "Assignment DEFINITIONS AUTOMATIC TAGS ::=\nBEGIN\n",
+ End = "\nEND\n",
+ L0 = [{"42",3,{syntax_error,42}},
+ {"i",4,{syntax_error,'END'}},
+ {"i ::=",3,{syntax_error,'::='}},
+ {"i type",4,{syntax_error,'END'}},
+ {"i type ::=",3,{syntax_error,'::='}},
+ {"i TYPE",4,{syntax_error,'END'}},
+ {"i TYPE ::= ",4,{syntax_error,'END'}},
+ {"i INTEGER ::= 42 garbage",4,{syntax_error,'END'}},
+ {"i{T} Type",4,{syntax_error,'END'}},
+ {"TYPE",4,{syntax_error,'END'}},
+ {"TYPE ::=",4,{syntax_error,'END'}},
+ {"TYPE{ ::=",3,{syntax_error,'::='}},
+ {"TYPE{P, ::=",3,{syntax_error,'::='}},
+ {"TYPE{P,} ::=",3,{syntax_error,'}'}},
+ {"TYPE{Gov:} ::=",3,{syntax_error,':'}},
+ {"TYPE{A} CL ",4,{syntax_error,'END'}},
+ {"ObjSet CL",4,{syntax_error,'END'}}
+ ],
+ L = [{Head++S++End,Line,E} || {S,Line,E} <- L0],
+ run(L, "Assignment", Config),
+ ok.
+
+class(Config) ->
+ Head = "Class DEFINITIONS AUTOMATIC TAGS ::=\n"
+ "BEGIN\n"
+ " CL ::= CLASS {",
+ End = "\nEND\n",
+ L0 = [{"id",3,{syntax_error,'id'}},
+ {"&id INTEGER",4,{syntax_error,'END'}},
+ {"&id INTEGER,",4,{syntax_error,'END'}},
+ {"&id,",3,{syntax_error,','}},
+ {"&id OPTIONAL",3,{syntax_error,'OPTIONAL'}},
+ {"&id INTEGER OPTIONAL",4,{syntax_error,'END'}},
+ {"&var &Field",4,{syntax_error,'END'}},
+ {"&Type,",4,{syntax_error,'END'}},
+ {"&Type OPTIONAL",4,{syntax_error,'END'}},
+ {"&ValueSet INTEGER OPTIONAL",4,{syntax_error,'END'}},
+ {"&ValueSet INTEGER DEFAULT",4,{syntax_error,'END'}},
+ {"&ValueSet INTEGER DEFAULT {",4,{syntax_error,'END'}},
+ {"&ValueSet INTEGER DEFAULT {a",4,{syntax_error,'END'}},
+ {"&Var &Field",4,{syntax_error,'END'}}
+ ],
+ L = [{Head++S++End,Line,E} || {S,Line,E} <- L0],
+ run(L, "Class", Config),
+ ok.
+
+constraints(Config) ->
+ Head = "Constraints DEFINITIONS AUTOMATIC TAGS ::=\n"
+ "BEGIN\n"
+ " Type ::= ",
+ End = "\nEND\n",
+ L0 = [{"INTEGER (",4,{syntax_error,'END'}},
+ {"INTEGER (10x",3,{syntax_error,x}},
+ {"INTEGER (10|(10y",3,{syntax_error,y}},
+ {"INTEGER (CONSTRAINED BY {}",4,{syntax_error,'END'}},
+ {"INTEGER (CONSTRAINED BY {INTEGER garbage",3,
+ {syntax_error,garbage}},
+ {"INTEGER ({ObjSet",4,{syntax_error,'END'}},
+ {"INTEGER ({ObjSet}{",3,{syntax_error,'{'}},
+ {"INTEGER ({ObjSet}{@",3,{syntax_error,'{'}},
+ {"INTEGER ({ObjSet}{@x",3,{syntax_error,'{'}},
+ {"INTEGER ({ObjSet}{@x}",4,{syntax_error,'END'}},
+ {"INTEGER (10 !BOOLEAN",4,{syntax_error,'END'}},
+ {"INTEGER (10 !BOOLEAN:",4,{syntax_error,'END'}},
+ {"INTEGER (10 !BOOLEAN:FALSE",4,{syntax_error,'END'}},
+ {"SEQUENCE {} (WITH COMPONENTS { Type })",
+ 3,{syntax_error,'Type'}},
+ {"SEQUENCE {} (WITH COMPONENTS { x (10)",
+ 4,{syntax_error,'END'}},
+ {"SEQUENCE {} (WITH COMPONENTS { ..., x (10)",
+ 4,{syntax_error,'END'}}
+ ],
+ L = [{Head++S++End,Line,E} || {S,Line,E} <- L0],
+ run(L, "Constraints", Config),
+ ok.
+
+exports(Config) ->
+ Head = "Exports DEFINITIONS AUTOMATIC TAGS ::=\n"
+ "BEGIN\n"
+ " EXPORTS ",
+ End = "\nEND\n",
+ L0 = [{"Type",4,{syntax_error,'END'}}
+ ],
+ L = [{Head++S++End,Line,E} || {S,Line,E} <- L0],
+ run(L, "Exports", Config),
+ ok.
+
+header(Config) ->
+ L = [{"lowercase",1,{syntax_error,lowercase}},
+ {"H ",2,{syntax_error,'END-OF-FILE'}},
+ {"H-",1,{syntax_error,'-'}},
+ {"42",1,{syntax_error,42}},
+ {"H definitions",1,{syntax_error,definitions}},
+ {"H DEFINITIONS STUPID TAGS",1,{syntax_error,'STUPID'}},
+ {"H DEFINITIONS WHATEVER",1,{syntax_error,'WHATEVER'}},
+ {"H DEFINITIONS ::= BEGIN",2,{syntax_error,'END-OF-FILE'}},
+ {"BOOLEAN",1,{syntax_error,'BOOLEAN'}}
+ ],
+ run(L, "H", Config),
+ ok.
+
+imports(Config) ->
+ Head = "Imports DEFINITIONS AUTOMATIC TAGS ::=\n"
+ "BEGIN\n"
+ " IMPORTS ",
+ End = "\nEND\n",
+ L0 = [{"Type FROM X",4,{syntax_error,'END'}},
+ {"Symbols TO Y",3,{syntax_error,'TO'}}
+ ],
+ L = [{Head++S++End,Line,E} || {S,Line,E} <- L0],
+ run(L, "Imports", Config),
+ ok.
+
+objects(Config) ->
+ Head = "Objects DEFINITIONS AUTOMATIC TAGS ::=\n"
+ "BEGIN\n"
+ " object CLASS-NAME ::= ",
+ End = "\nEND\n",
+ L0 = [{"{",4,{syntax_error,'END'}},
+ {"{&min 1, max 10}",3,{syntax_error,max}},
+ {"{&min 1, Max 10}",3,{syntax_error,'Max'}},
+ {"{min 1, &max 10}",3,{syntax_error,'&max'}},
+ {"{min 1, &Max 10}",3,{syntax_error,'&Max'}},
+ {"{RESERVERD WORD BIT}",3,{syntax_error,'BIT'}},
+ {"{&min 1",4,{syntax_error,'END'}}
+ ],
+ L = [{Head++S++End,Line,E} || {S,Line,E} <- L0],
+ run(L, "Objects", Config),
+ ok.
+
+sequence(Config) ->
+ Head = "Sequence DEFINITIONS AUTOMATIC TAGS ::=\n"
+ "BEGIN\n"
+ " Type ::= SEQUENCE {",
+ End = "\nEND\n",
+ L0 = [{"",4,{syntax_error,'END'}},
+ {" UpperCase",3,{syntax_error,'UpperCase'}},
+ {" a b",4,{syntax_error,'END'}},
+ {" i INTEGER",4,{syntax_error,'END'}},
+ {" ...",4,{syntax_error,'END'}},
+ {" ..., [[",4,{syntax_error,'END'}},
+ {" ..., [[ a INTEGER ]",3,{syntax_error,']'}},
+ {" ..., [[ a INTEGER,",3,{syntax_error,','}},
+ {" ..., [[ a INTEGER, ... ]]",3,{syntax_error,','}},
+ {" ... !42 xxx",3,{syntax_error,'xxx'}},
+ {" ... !42, a INTEGER,",3,{syntax_error,','}}
+ ],
+ L = [{Head++S++End,Line,E} || {S,Line,E} <- L0],
+ run(L, "Sequence", Config),
+ ok.
+
+syntax(Config) ->
+ Head = "Syntax DEFINITIONS AUTOMATIC TAGS ::=\n"
+ "BEGIN\n"
+ " CL ::= CLASS { &id INTEGER UNIQUE } WITH SYNTAX ",
+ End = "\nEND\n",
+ L0 = [{"{}",3,{syntax_error,'}'}},
+ {"WORD",3,{syntax_error,'WORD'}},
+ {"{ Word }",3,{syntax_error,'Word'}},
+ {"{ [ Word ] }",3,{syntax_error,'Word'}},
+ {"{ [ WORD }",3,{syntax_error,'}'}},
+ {"{ WORD;",3,{syntax_error,';'}}
+ ],
+ L = [{Head++S++End,Line,E} || {S,Line,E} <- L0],
+ run(L, "Syntax", Config),
+ ok.
+
+tokenizer(Config) ->
+ Head = "Tokenize DEFINITIONS AUTOMATIC TAGS ::=\n"
+ "BEGIN\n",
+ End = "\nEND\n",
+ L0 = [{"'",3,eol_in_token},
+ {"'42'B",3,{invalid_binary_number,"42"}},
+ {"'ZZZ'H",3,{invalid_hex_number,"ZZZ"}},
+ {"\"abc",3,missing_quote_at_eof},
+ {"/*",3,eof_in_comment}
+ ],
+ L = [{Head++S++End,Line,E} || {S,Line,E} <- L0],
+ run(L, "Tokenizer", Config, asn1ct_tok),
+ ok.
+
+types(Config) ->
+ Head = "Types DEFINITIONS AUTOMATIC TAGS ::=\n"
+ "BEGIN\n"
+ " Type ::= ",
+ End = "\nEND\n",
+ L0 = [{"BIT STRING garbage",4,{syntax_error,'END'}},
+ {"BIT STRING {",4,{syntax_error,'END'}},
+ {"BIT STRING { a(42",3,{syntax_error,42}},
+ {"BIT STRING { a(0)",4,{syntax_error,'END'}},
+ {"CHOICE {",4,{syntax_error,'END'}},
+ {"CHOICE { ..., a}",3,{syntax_error,'...'}},
+ {"CHOICE { UpperCase",3,{syntax_error,'UpperCase'}},
+ {"CHOICE { i INTEGER",4,{syntax_error,'END'}},
+ {"CHOICE { ..., i INTEGER }",3,{syntax_error,'...'}},
+ {"CHOICE { b BOOLEAN, ..., i INTEGER",
+ 4,{syntax_error,'END'}},
+ {"CHOICE { b BOOLEAN, ..., [[ e BOOLEAN, ...]]}",
+ 3,{syntax_error,','}},
+ {"CHOICE { b BOOLEAN, ..., i INTEGER, ..., x BIT STRING}",
+ 3,{syntax_error,','}},
+ {"ENUMERATED {",4,{syntax_error,'END'}},
+ {"ENUMERATED { 42 }",3,{syntax_error,42}},
+ {"ENUMERATED { a, b",4,{syntax_error,'END'}},
+ {"ENUMERATED { a, }",3,{syntax_error,','}},
+ {"ENUMERATED { a, ...,\nb, ..., c }",4,{syntax_error,','}},
+ {"INTEGER {",4,{syntax_error,'END'}},
+ {"INTEGER { a(42)",4,{syntax_error,'END'}},
+ {"SEQUENCE",3,{syntax_error,'SEQUENCE'}},
+ %% More tests for SEQUENCE in sequence/1.
+ {"SEQUENCE SIZE (1..10)",4,{syntax_error,'END'}},
+ {"SEQUENCE (SIZE (1..10))",4,{syntax_error,'END'}},
+ {"SET { i INTEGER",4,{syntax_error,'END'}},
+ {"SET { ...",4,{syntax_error,'END'}},
+ {"SET SIZE (1..10)",4,{syntax_error,'END'}},
+ {"SET (SIZE (1..10))",4,{syntax_error,'END'}},
+ {"SET { ... !42 xxx",3,{syntax_error,'xxx'}},
+ {"SET { ... !42, a INTEGER,",3,{syntax_error,','}},
+ {"[",4,{syntax_error,'END'}},
+ {"[42",4,{syntax_error,'END'}}
+ ],
+ L = [{Head++S++End,Line,E} || {S,Line,E} <- L0],
+ run(L, "Types", Config),
+ ok.
+
+values(Config) ->
+ Head = "Values DEFINITIONS AUTOMATIC TAGS ::=\n"
+ "BEGIN\n"
+ " value Type ::= ",
+ End = "\nEND\n",
+ L0 = [{"",4,{syntax_error,'END'}}
+ ],
+ L = [{Head++S++End,Line,E} || {S,Line,E} <- L0],
+ run(L, "Values", Config),
+ ok.
+
+run(List, File, Config) ->
+ run(List, File, Config, asn1ct_parser2).
+
+run(List, File0, Config, Module) ->
+ Base = File0 ++ ".asn1",
+ File = filename:join(?config(priv_dir, Config), Base),
+ case run_1(List, Base, File, Module, 0) of
+ 0 -> ok;
+ Errors -> ?t:fail(Errors)
+ end.
+
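+%% Write each snippet to File, compile it, and check that the compiler
+%% reports the expected {structured_error,{Base,Line},Module,Error}.
+%% Returns the number of mismatches.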
+run_1([{Source,Line,Error}=Exp|T], Base, File, Module, N) ->
+ ok = file:write_file(File, Source),
+ io:format("~s", [Source]),
+ case asn1ct:compile(File) of
+ {error,[{structured_error,{Base,L},Module,E}]} ->
+ case {L,E} of
+ {Line,Error} ->
+ run_1(T, Base, File, Module, N);
+ {Line,OtherError} ->
+ io:format("*** Wrong error: ~p, expected ~p ***\n",
+ [OtherError,Error]),
+ run_1(T, Base, File, Module, N+1);
+ {OtherLine,Error} ->
+ io:format("*** Wrong line: ~p, expected ~p ***\n",
+ [OtherLine,Line]),
+ run_1(T, Base, File, Module, N+1);
+ {_,_} ->
+		    io:format("*** Wrong line: ~p, expected ~p ***\n",
+ [L,Line]),
+ io:format("*** Wrong error: ~p, expected ~p ***\n",
+ [E,Error]),
+ run_1(T, Base, File, Module, N+1)
+ end;
+ Other ->
+	    io:format("~p\nGOT: ~p", [Exp,Other]),
+	    run_1(T, Base, File, Module, N+1)
+ end;
+run_1([], _, _, _, N) ->
+ N.
diff --git a/lib/asn1/test/testChoExtension.erl b/lib/asn1/test/testChoExtension.erl
index 09e19ceebb..f36d6c1cbf 100644
--- a/lib/asn1/test/testChoExtension.erl
+++ b/lib/asn1/test/testChoExtension.erl
@@ -39,11 +39,6 @@ extension(_Rules) ->
roundtrip('ChoExt3', {int,33}),
roundtrip('ChoExt4', {str,<<"abc">>}),
- roundtrip('ChoEmptyRoot', {bool,false}),
- roundtrip('ChoEmptyRoot', {bool,true}),
- roundtrip('ChoEmptyRoot', {int,0}),
- roundtrip('ChoEmptyRoot', {int,7}),
-
ok.
diff --git a/lib/asn1/test/testConstraints.erl b/lib/asn1/test/testConstraints.erl
index 3ccf883bd6..5d65cb8d73 100644
--- a/lib/asn1/test/testConstraints.erl
+++ b/lib/asn1/test/testConstraints.erl
@@ -231,6 +231,28 @@ int_constraints(Rules) ->
seq_roundtrip(Rules, 'SeqOverlapping', 'SeqNonOverlapping', 19000),
seq_roundtrip(Rules, 'SeqOverlapping', 'SeqNonOverlapping', 26900),
+ %%==========================================================
+ %% Constraints from object fields.
+ %%==========================================================
+ range_error(Rules, 'IntObjectConstr', 1),
+ roundtrip('IntObjectConstr', 2),
+ roundtrip('IntObjectConstr', 3),
+ roundtrip('IntObjectConstr', 4),
+ range_error(Rules, 'IntObjectConstr', 5),
+
+
+ %%==========================================================
+ %% INTEGER constraints defined using named INTEGERs.
+ %%==========================================================
+ 42 = 'Constraints':'constrainedNamedInt-1'(),
+ 100 = 'Constraints':'constrainedNamedInt-2'(),
+ range_error(Rules, 'ConstrainedNamedInt', 41),
+ roundtrip('ConstrainedNamedInt', v1),
+ range_error(Rules, 'ConstrainedNamedInt', 43),
+
+ range_error(Rules, 'SeqWithNamedInt', {'SeqWithNamedInt',-100}),
+ roundtrip('SeqWithNamedInt', {'SeqWithNamedInt',v2}),
+
ok.
%% PER: Ensure that if the lower bound is Lb, Lb+16#80 is encoded
diff --git a/lib/asn1/test/testDoubleEllipses.erl b/lib/asn1/test/testDoubleEllipses.erl
index 4e8972cdfc..3caa166ae0 100644
--- a/lib/asn1/test/testDoubleEllipses.erl
+++ b/lib/asn1/test/testDoubleEllipses.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -24,17 +24,20 @@
-include_lib("test_server/include/test_server.hrl").
-record('Seq',{a, c}).
+-record('SeqV1',{a, b}).
-record('SeqV2',{a, b ,c}).
-record('SeqAlt',{a,d,b,e,c,f,g}).
-record('SeqAltV2',{a,d,b,e,h,i,c,f,g}).
-record('Set',{a, c}).
+-record('SetV1',{a, b}).
-record('SetV2',{a, b ,c}).
-record('SetAlt',{a,d,b,e,c,f,g}).
-record('SetAltV2',{a,d,b,e,h,i,c,f,g}).
main(_Rules) ->
roundtrip('Seq', #'Seq'{a=10,c=true}),
+ roundtrip('SeqV1', #'SeqV1'{a=10,b=false}),
roundtrip('SeqV2', #'SeqV2'{a=10,b=false,c=true}),
roundtrip('SeqAlt',
#'SeqAlt'{a=10,d=12,b = <<2#1010:4>>,
@@ -45,6 +48,7 @@ main(_Rules) ->
e=true,h="PS",i=13,c=false,f=14,g=16}),
roundtrip('Set', #'Set'{a=10,c=true}),
+ roundtrip('SetV1', #'SetV1'{a=10,b=false}),
roundtrip('SetV2', #'SetV2'{a=10,b=false,c=true}),
roundtrip('SetAlt',
#'SetAlt'{a=10,d=12,
diff --git a/lib/asn1/test/testEnumExt.erl b/lib/asn1/test/testEnumExt.erl
index 878518be11..29995d6340 100644
--- a/lib/asn1/test/testEnumExt.erl
+++ b/lib/asn1/test/testEnumExt.erl
@@ -78,6 +78,9 @@ common(Erule) ->
v_roundtrip(Erule, 'SeqBig', {'SeqBig',true,e40,9357}),
v_roundtrip(Erule, 'SeqBig', {'SeqBig',true,e80,9357}),
+
+ v_roundtrip(Erule, 'EnumSkip', d),
+
ok.
roundtrip(Type, Value) ->
@@ -85,11 +88,20 @@ roundtrip(Type, Value) ->
v_roundtrip(Erule, Type, Value) ->
Encoded = roundtrip(Type, Value),
- Encoded = asn1_test_lib:hex_to_bin(v(Erule, Value)).
-
-v(ber, {'SeqBig',true,e40,9357}) -> "300A8001 FF810141 8202248D";
-v(ber, {'SeqBig',true,e80,9357}) -> "300B8001 FF810200 81820224 8D";
-v(per, {'SeqBig',true,e40,9357}) -> "E0014002 248D";
-v(per, {'SeqBig',true,e80,9357}) -> "E0018002 248D";
-v(uper, {'SeqBig',true,e40,9357}) -> "E0280044 91A0";
-v(uper, {'SeqBig',true,e80,9357}) -> "E0300044 91A0".
+ Encoded = asn1_test_lib:hex_to_bin(v(Erule, Type, Value)).
+
+v(Erule, 'SeqBig', Value) ->
+ v_seq_big(Erule, Value);
+v(Erule, 'EnumSkip', Value) ->
+ v_enum_skip(Erule, Value).
+
+v_seq_big(ber, {'SeqBig',true,e40,9357}) -> "300A8001 FF810141 8202248D";
+v_seq_big(ber, {'SeqBig',true,e80,9357}) -> "300B8001 FF810200 81820224 8D";
+v_seq_big(per, {'SeqBig',true,e40,9357}) -> "E0014002 248D";
+v_seq_big(per, {'SeqBig',true,e80,9357}) -> "E0018002 248D";
+v_seq_big(uper, {'SeqBig',true,e40,9357}) -> "E0280044 91A0";
+v_seq_big(uper, {'SeqBig',true,e80,9357}) -> "E0300044 91A0".
+
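+%% Expected encodings (as hex strings) of the 'EnumSkip' value d for
+%% each encoding rule.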
+v_enum_skip(per, d) -> "82";
+v_enum_skip(uper, d) -> "82";
+v_enum_skip(ber, d) -> "0A0103".
diff --git a/lib/asn1/test/testExtensibilityImplied.erl b/lib/asn1/test/testExtensibilityImplied.erl
new file mode 100644
index 0000000000..8049bb6e53
--- /dev/null
+++ b/lib/asn1/test/testExtensibilityImplied.erl
@@ -0,0 +1,29 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2014. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%%
+
+-module(testExtensibilityImplied).
+-export([main/0]).
+
+main() ->
+ M = 'ExtensibilityImplied',
+ {'Seq2',true} = M:decode('Seq2', M:encode('Seq1', {'Seq1',true,42})),
+ {'Set2',true} = M:decode('Set2', M:encode('Set1', {'Set1',true,42})),
+ {asn1_enum,_} = M:decode('Enum2', M:encode('Enum1', ext)),
+ ok.
diff --git a/lib/asn1/test/testImporting.erl b/lib/asn1/test/testImporting.erl
new file mode 100644
index 0000000000..de8beae38b
--- /dev/null
+++ b/lib/asn1/test/testImporting.erl
@@ -0,0 +1,34 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2014. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%%
+
+-module(testImporting).
+-export([main/0]).
+
+main() ->
+ M = 'Importing',
+ roundtrip('Seq', {'Seq',5}),
+ roundtrip('OtherSeq', {'Seq',42,true}),
+ {'Seq',42,true} = M:seq(),
+ roundtrip('ObjSeq', {'ObjSeq',1,<<"XYZ">>}),
+ roundtrip('ObjSeq', {'ObjSeq',2,19}),
+ ok.
+
+roundtrip(Type, Value) ->
+ asn1_test_lib:roundtrip('Importing', Type, Value).
diff --git a/lib/asn1/test/testInfObj.erl b/lib/asn1/test/testInfObj.erl
index 37c134b1b9..3044d5cd2a 100644
--- a/lib/asn1/test/testInfObj.erl
+++ b/lib/asn1/test/testInfObj.erl
@@ -74,6 +74,12 @@ main(_Erule) ->
{'ConstructedPdu',7,[]}),
roundtrip('InfObj', 'ConstructedPdu',
{'ConstructedPdu',7,[64,1,19,17,35]}),
+ {'ConstructedPdu',8,[{_,-15,35},{_,533,-70}]} =
+ enc_dec('InfObj', 'ConstructedPdu',
+ {'ConstructedPdu',8,[{'_',-15,35},{'_',533,-70}]}),
+ {'ConstructedPdu',9,[{RecTag9,-15,35},{RecTag9,533,-70}]} =
+ enc_dec('InfObj', 'ConstructedPdu',
+ {'ConstructedPdu',9,[{'_',-15,35},{'_',533,-70}]}),
roundtrip('InfObj', 'ConstructedSet',
{'ConstructedSet',1,{'CONSTRUCTED-DEFAULT_Type',-2001,true}}),
@@ -96,6 +102,12 @@ main(_Erule) ->
{'ConstructedSet',7,[]}),
roundtrip('InfObj', 'ConstructedSet',
{'ConstructedSet',7,[64,1,19,17,35]}),
+ {'ConstructedSet',8,[{_,-15,35},{_,533,-70}]} =
+ enc_dec('InfObj', 'ConstructedSet',
+ {'ConstructedSet',8,[{'_',-15,35},{'_',533,-70}]}),
+ {'ConstructedSet',9,[{_,-15,35},{_,533,-70}]} =
+ enc_dec('InfObj', 'ConstructedSet',
+ {'ConstructedSet',9,[{'_',-15,35},{'_',533,-70}]}),
roundtrip('InfObj', 'Seq2',
{'Seq2',42,[true,false,false,true],
@@ -126,12 +138,37 @@ main(_Erule) ->
test_objset('OstSeq45', [4,5]),
test_objset('OstSeq12345', [1,2,3,4,5]),
+ test_objset('OstSeq12Except', [1,2]),
+ test_objset('OstSeq123Except', [1,2]),
+
test_objset('ExOstSeq12', [1,2]),
test_objset('ExOstSeq123', [1,2,3]),
- %%test_objset('ExOstSeq1234', [1,2,3,4]),
+ test_objset('ExOstSeq1234', [1,2,3,4]),
test_objset('ExOstSeq45', [4,5]),
test_objset('ExOstSeq12345', [1,2,3,4,5]),
+ test_objset('ExOstSeq12Except', [1,2]),
+ test_objset('ExOstSeq123Except', [1,2]),
+
+ roundtrip('InfObj', 'ExtClassSeq', {'ExtClassSeq', 4}),
+
+ {1,2,42} = 'InfObj':'value-1'(),
+ {1,2,42,25} = 'InfObj':'value-2'(),
+ {100,101} = 'InfObj':'value-3'(),
+ {1,2,100,101} = 'InfObj':'value-4'(),
+
+ roundtrip('InfObj', 'Rdn', {'Rdn',{2,5,4,41},"abc"}),
+
+ roundtrip('InfObj', 'TiAliasSeq',
+ {'TiAliasSeq',{'TiAliasSeq_prf',{2,1,2},'NULL'}}),
+
+ roundtrip('InfObj', 'ContentInfo',
+ {'ContentInfo',{2,7,8,9},"string"}),
+ {2,7,8,9} = 'InfObj':'id-content-type'(),
+
+ <<2#1011:4>> = 'InfObj':'tricky-bit-string'(),
+ <<16#CAFE:16>> = 'InfObj':'tricky-octet-string'(),
+
ok.
test_objset(Type, Keys) ->
diff --git a/lib/asn1/test/testInfObjExtract.erl b/lib/asn1/test/testInfObjExtract.erl
new file mode 100644
index 0000000000..0ef967c1f6
--- /dev/null
+++ b/lib/asn1/test/testInfObjExtract.erl
@@ -0,0 +1,72 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2014. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%%
+
+-module(testInfObjExtract).
+
+-export([main/0]).
+
+main() ->
+ roundtrip_data_object_13('DataSeq-1'),
+
+ roundtrip_data_object_1('DataSeq-2'),
+ roundtrip_data_object_1('DataSeq-3'),
+ roundtrip_data_object_1('DataSeq-4'),
+
+ roundtrip_data_object_13('DataSeq-5'),
+ roundtrip_data_object_13('DataSeq-6'),
+
+ roundtrip_data_object_1('DataSeqSingleSet-1'),
+ roundtrip_data_object_1('DataSeqSingleSet-2'),
+
+ roundtrip('ObjClassSeq-1', {'ObjClassSeq-1',1,true}),
+ roundtrip('ObjClassSeq-1', {'ObjClassSeq-1',2,true}),
+
+ roundtrip_error('ObjClassSeq-1', {'ObjClassSeq-1',0,false}),
+ roundtrip_error('ObjClassSeq-1', {'ObjClassSeq-1',3,true}),
+ roundtrip_error('ObjClassSeq-1', {'ObjClassSeq-1',4,false}),
+ roundtrip_error('ObjClassSeq-1', {'ObjClassSeq-1',5,true}),
+
+ ok.
+
+roundtrip_data_object_13(SeqType) ->
+ roundtrip(SeqType, {SeqType,1,true}),
+ roundtrip(SeqType, {SeqType,2,<<"abc">>}),
+ roundtrip(SeqType, {SeqType,3,<<42:5>>}),
+ roundtrip_error(SeqType, {SeqType,4,42}).
+
+roundtrip_data_object_1(SeqType) ->
+ roundtrip(SeqType, {SeqType,1,false}),
+ roundtrip(SeqType, {SeqType,1,true}),
+ roundtrip_error(SeqType, {SeqType,1,42}),
+ roundtrip_error(SeqType, {SeqType,2,<<"abc">>}),
+ roundtrip_error(SeqType, {SeqType,3,<<42:5>>}),
+ roundtrip_error(SeqType, {SeqType,999,42}).
+
+roundtrip(T, V) ->
+ asn1_test_lib:roundtrip('InfObjExtract', T, V).
+
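+%% The value is expected to be rejected; any exception from the
+%% roundtrip counts as success.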
+roundtrip_error(T, V) ->
+ try asn1_test_lib:roundtrip('InfObjExtract', T, V) of
+ ok ->
+ test_server:fail()
+ catch
+ _:_ ->
+ ok
+ end.
diff --git a/lib/asn1/test/testParamBasic.erl b/lib/asn1/test/testParamBasic.erl
index 39f7947e8d..5f6116bba4 100644
--- a/lib/asn1/test/testParamBasic.erl
+++ b/lib/asn1/test/testParamBasic.erl
@@ -46,6 +46,14 @@ main(Rules) ->
roundtrip('AnAlgorithm', {'AnAlgorithm',1,42}),
roundtrip('AnAlgorithm', {'AnAlgorithm',2,true}),
roundtrip('AnAlgorithm', {'AnAlgorithm',2,false}),
+ {'AnAlgorithm',1,42} = 'ParamBasic':'alg-seq-1'(),
+ {'AnAlgorithm',2,true} = 'ParamBasic':'alg-seq-2'(),
+
+ roundtrip('Seq', {'Seq',
+ {'Seq_c1',{2,1,1},42},
+ {'Seq_c2',{2,1,1,1},asn1_NOVALUE}}),
+
+ {_,{2,9,9,9,7},'NULL'} = 'ParamBasic':'algid-hmacWithSHA1'(),
ok.
roundtrip(Type, Value) ->
diff --git a/lib/asn1/test/testPrim.erl b/lib/asn1/test/testPrim.erl
index e07379e634..d7893a2d58 100644
--- a/lib/asn1/test/testPrim.erl
+++ b/lib/asn1/test/testPrim.erl
@@ -98,6 +98,11 @@ enum(Rules) ->
ber ->
ok
end,
+
+ roundtrip('NegEnumVal', neg),
+ roundtrip('NegEnumVal', zero),
+ roundtrip('EnumVal128', val),
+
ok.
diff --git a/lib/asn1/test/testRfcs.erl b/lib/asn1/test/testRfcs.erl
new file mode 100644
index 0000000000..6281d09873
--- /dev/null
+++ b/lib/asn1/test/testRfcs.erl
@@ -0,0 +1,75 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2014. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%%
+
+-module(testRfcs).
+
+-export([compile/3,test/0]).
+
+-include_lib("test_server/include/test_server.hrl").
+
+compile(Config, Erules, Options0) ->
+ Options = [no_ok_wrapper|Options0],
+ DataDir = ?config(data_dir, Config),
+ Specs0 = filelib:wildcard("*.asn1", filename:join(DataDir, rfcs)),
+ Specs = [filename:join(rfcs, Spec) || Spec <- Specs0],
+ 122 = length(Specs),
+ CaseDir = ?config(case_dir, Config),
+ asn1_test_lib:compile_all(Specs, Config, [Erules,{i,CaseDir}|Options]).
+
+test() ->
+ {1,3,6,1,5,5,7,48,1,2} =
+ IdPkixOcspNonce =
+ 'OCSP-2009':'id-pkix-ocsp-nonce'(),
+ roundtrip('OCSP-2009', 'OCSPRequest',
+ {'OCSPRequest',
+ {'TBSRequest',
+ 0,
+ {rfc822Name,"name string"},
+ [{'Request',
+ {'CertID',{'_',{2,9,3,4,5},asn1_NOVALUE},
+ <<"POTATOHASH">>,<<"HASHBROWN">>,42},
+ [{'_',IdPkixOcspNonce,true,<<34,159,16,57,199>>}]}],
+ asn1_NOVALUE},
+ asn1_NOVALUE}),
+ otp_7759(),
+ ok.
+
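+%% Encode and decode, then let asn1_test_lib:match_value/2 compare the
+%% result with Value0 (which may contain '_' placeholders).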
+roundtrip(Module, Type, Value0) ->
+ Enc = Module:encode(Type, Value0),
+ Value1 = Module:decode(Type, Enc),
+ asn1_test_lib:match_value(Value0, Value1),
+ ok.
+
+otp_7759() ->
+    %% The release note for asn1-1.6.6 says:
+ %% Decode of an open_type when the value was empty tagged
+ %% type encoded with indefinite length failed.
+ Mod = 'OLD-PKCS7',
+ Encoded = encoded_msg(),
+ ContentInfo = Mod:decode('ContentInfo', Encoded),
+ io:format("~p\n", [ContentInfo]),
+ {'ContentInfo',_Id,PKCS7_content} = ContentInfo,
+ X = Mod:decode('SignedData', PKCS7_content),
+ io:format("~p\n", [X]),
+ io:nl(),
+ ok.
+
+encoded_msg() ->
+ <<48,128,6,9,42,134,72,134,247,13,1,7,2,160,128,48,128,2,1,1,49,11,48,9,6,5,43,14,3,2,26,5,0,48,128,6,9,42,134,72,134,247,13,1,7,1,160,128,36,128,0,0,0,0,0,0, 49,130,1,192,48,130,1,188,2,1,1,48,50,48,38,49,17,48,15,6,3,85,4,3,12,8,65,100,109,105,110,67,65,49,49,17,48,15,6,3,85,4,10,12,8,69,82,73,67,83,83,79,78,2,8,15,151,245,186,21,23,240,96,48,9,6,5,43,14,3,2,26,5,0,160,129,229,48,17,6,10,96,134,72,1,134,248,69,1,9,2,49,3,19,1,51,48,17,6,10,96,134,72,1,134,248,69,1,9,3,49,3,19,1,51,48,24,6,9,42,134,72,134,247,13,1,9,3,49,11,6,9,42,134,72,134,247,13,1,7,1,48,28,6,9,42,134,72,134,247,13,1,9,5,49,15,23,13,48,56,49,50,49,48,48,57,53,52,50,51,90,48,28,6,10,96,134,72,1,134,248,69,1,9,7,49,14,19,12,49,53,50,56,49,52,50,52,48,57,53,53,48,32,6,10,96,134,72,1,134,248,69,1,9,5,49,18,4,16,165,115,177,71,78,88,239,113,78,56,98,98,18,202,217,235,48,32,6,10,96,134,72,1,134,248,69,1,9,6,49,18,4,16,227,174,230,251,43,153,252,65,11,93,231,83,34,18,55,46,48,35,6,9,42,134,72,134,247,13,1,9,4,49,22,4,20,218,57,163,238,94,107,75,13,50,85,191,239,149,96,24,144,175,216,7,9,48,13,6,9,42,134,72,134,247,13,1,1,1,5,0,4,129,128,106,233,116,125,140,51,133,173,63,41,54,138,214,211,89,215,169,125,98,77,16,222,216,240,211,79,125,111,87,186,73,63,253,204,107,102,177,63,174,197,224,212,231,172,149,246,33,68,223,67,102,93,64,152,152,5,216,102,247,134,36,197,150,236,57,77,56,138,95,71,204,31,23,149,241,213,78,172,165,249,100,187,12,45,19,57,67,120,54,63,15,239,41,217,127,61,254,60,201,104,68,3,135,214,206,93,253,255,192,94,56,107,68,210,57,61,41,249,47,156,130,244,52,12,163,216,236,69,0,0,0,0,0,0>>.
diff --git a/lib/asn1/test/testSelectionTypes.erl b/lib/asn1/test/testSelectionTypes.erl
index 6d060321da..7d273fe656 100644
--- a/lib/asn1/test/testSelectionTypes.erl
+++ b/lib/asn1/test/testSelectionTypes.erl
@@ -23,10 +23,34 @@
-include_lib("test_server/include/test_server.hrl").
test() ->
- Val = ["PrintableString","PrintableString","PrintableString"],
["Es"] = Val2 = ['SelectionType':einsteinium()],
- roundtrip('MendeleyevTable', Val),
+ roundtrip('MendeleyevTable', ["fox","tree","cat","stone"]),
roundtrip('MendeleyevTable', Val2),
+ roundtrip('MendeleyevSet', [42,57,93,101]),
+
+ M = 'SelectionType',
+ true = M:boolv(),
+ 4 = M:intv(),
+ <<2#1001:4>> = M:bsv(),
+ <<16#3130:16>> = M:osv(),
+ 'NULL' = M:nullv(),
+ {2,1,1} = M:oiv(),
+ "ObjectDesc" = M:odv(),
+ "utf8" = M:utfv(),
+ {5,32767,256} = M:rov(),
+ "089" = M:numsv(),
+ "telet" = M:teletv(),
+ "t61" = M:t61v(),
+ "video" = M:videov(),
+ "ia5" = M:ia5v(),
+ "9805281429Z" = M:utctimev(),
+ "19980528142905.1" = M:gTime(),
+ "graphic" = M:gsv(),
+ "visible" = M:vsv(),
+ "general" = M:gStringv(),
+ "Universal" = M:univv(),
+ "bmp" = M:bmov(),
+
ok.
roundtrip(T, V) ->
diff --git a/lib/asn1/test/testUniqueObjectSets.erl b/lib/asn1/test/testUniqueObjectSets.erl
new file mode 100644
index 0000000000..1ef61a885a
--- /dev/null
+++ b/lib/asn1/test/testUniqueObjectSets.erl
@@ -0,0 +1,175 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2014. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%%
+
+-module(testUniqueObjectSets).
+-export([main/3]).
+
+%% Run-time function called by the generated code.
+seq_roundtrip(I, D0) ->
+ M = 'UniqueObjectSets',
+ try
+ {ok,Enc} = M:encode('Seq', {'Seq',I,D0}),
+ {ok,{'Seq',I,D}} = M:decode('Seq', Enc),
+ D
+ catch C:E ->
+ Stk = erlang:get_stacktrace(),
+ io:format("FAILED: ~p ~p\n", [I,D0]),
+ erlang:raise(C, E, Stk)
+ end.
+
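+%% ASN.1 type definitions (as strings) paired with a sample value;
+%% each pair is used to generate a type, objects in the object set,
+%% and a roundtrip test case.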
+types() ->
+ [{"CHOICE { a INTEGER, b BIT STRING }", {b,<<42:3>>}},
+ {"INTEGER",42},
+ {"SEQUENCE {a OCTET STRING}",{'_',<<"abc">>}},
+ {"SEQUENCE {b BOOLEAN, ...}",{'_',true}},
+ {"SEQUENCE {b BOOLEAN, ..., s IA5String, ..., e ENUMERATED { x, y, z}}",
+ {'_',false,"string",y}},
+ {"SET {a BIT STRING}",{'_',<<1:17>>}},
+ {"SEQUENCE OF INTEGER",[-19,0,555,777]},
+ {"SET OF BOOLEAN",[true,false,true]},
+ {"SEQUENCE OF SEQUENCE {x INTEGER (0..7)}",[{'_',7},{'_',0}]},
+ {"SET OF SEQUENCE {x INTEGER (0..7)}",[{'_',7},{'_',0}]}
+ ].
+
+main(CaseDir, Rule, Opts) ->
+ D0 = types(),
+ {D1,_} = lists:mapfoldl(fun({T,S}, I) ->
+ {{I,T,S},I+1}
+ end, 1, D0),
+ Types = [gen_types(I, Type) || {I,Type,_} <- D1],
+ Set = [gen_set_items(I, T) || {I,T,_} <- D1],
+ Objs = [gen_obj(I) || {I,_,_} <- D1],
+ DupObjs = [gen_dup_obj(I, T) || {I,T,_} <- D1],
+ DupObjRefs0 = [gen_dup_obj_refs(I) || {I,_,_} <- D1],
+ DupObjRefs = string:join(DupObjRefs0, " |\n"),
+ Asn1Spec = 'UniqueObjectSets',
+ A = ["UniqueObjectSets DEFINITIONS AUTOMATIC TAGS ::=\n",
+ "BEGIN\n\n",
+ "TEST-UNIQUE ::= CLASS {\n"
+ " &id INTEGER UNIQUE,\n"
+ " &Type OPTIONAL\n"
+ "}\n"
+ "WITH SYNTAX {IDENTIFIED BY &id [TYPE &Type]}\n",
+ $\n,
+ "DUP-CONTAINER ::= CLASS {\n"
+ " &id INTEGER UNIQUE,\n"
+ " &data TEST-UNIQUE\n"
+ "} WITH SYNTAX {\n"
+ " ID &id, &data\n"
+ "}\n",
+ $\n,
+ Types,$\n,
+ "UniqSet TEST-UNIQUE ::= {\n",
+ Set,
+ " DupSet-1 |\n",
+ " DupSet-2, ...\n",
+ "}\n\n",
+ Objs,$\n,
+ DupObjs,$\n,
+ "DupSet-1 TEST-UNIQUE ::= {\n",
+ DupObjRefs,$\n,
+ "}\n\n",
+ "DupSet-2 TEST-UNIQUE ::= {\n",
+ DupObjRefs,",...\n",
+ "}\n\n",
+ "Seq ::= SEQUENCE {\n"
+ " id TEST-UNIQUE.&id ({UniqSet}),\n"
+ " type TEST-UNIQUE.&Type ({UniqSet}{@id})\n"
+ "}\n"
+ "END\n"],
+ Asn1File = filename:join(CaseDir, atom_to_list(Asn1Spec)++".asn1"),
+ ok = file:write_file(Asn1File, A),
+
+ TestModule = 'unique_object_sets',
+ Test0 = [gen_test(I, Data) || {I,_,Data} <- D1],
+ Test = ["-module(",atom_to_list(TestModule),").\n"
+ "-export([main/1]).\n"
+ "\n"
+ "main(SeqRoundtrip) ->\n",
+ " ",atom_to_list(Rule)," = '",atom_to_list(Asn1Spec),
+ "':encoding_rule(),\n",
+ Test0,
+ " ok.\n"
+ ],
+ ErlFile = filename:join(CaseDir, atom_to_list(TestModule)++".erl"),
+ ok = file:write_file(ErlFile, Test),
+
+ io:format("~s\n~s\n", [Asn1File,ErlFile]),
+ case Rule of
+ per ->
+ io:put_chars([A,$\n,Test,$\n]);
+ _ ->
+ ok
+ end,
+
+ ok = asn1ct:compile(Asn1File, [Rule,{outdir,CaseDir}|Opts]),
+ {ok,TestModule} = c:c(ErlFile, [{outdir,CaseDir}]),
+ TestModule:main(fun seq_roundtrip/2),
+ ok.
+
+gen_types(I, Type) ->
+ io_lib:format("AType~p ::= ~s\n", [I,Type]).
+
+gen_set_items(I, T) ->
+ io_lib:format(" {IDENTIFIED BY ~p TYPE AType~p} |\n"
+ " {IDENTIFIED BY ~p TYPE AType~p} |\n"
+ " {IDENTIFIED BY ~p TYPE ~s} |\n"
+ " obj-~p |\n\n",
+ [I,I,I,I,I,T,I]).
+
+gen_obj(I) ->
+ io_lib:format("obj-~p TEST-UNIQUE ::= {IDENTIFIED BY ~p TYPE AType~p}\n",
+ [I,I,I]).
+
+gen_dup_obj(I, T) ->
+ io_lib:format("dup-obj-~p DUP-CONTAINER ::= "
+ "{ID ~p, {IDENTIFIED BY ~p TYPE ~s}}\n",
+ [I,I,I+1000,T]).
+
+gen_dup_obj_refs(I) ->
+ io_lib:format("dup-obj-~p.&data", [I]).
+
+gen_test(I, Data) ->
+ io_lib:format(" ~s = SeqRoundtrip(~p, ~p),\n",
+ [match_term(Data),I,Data]).
+
+match_term('_') ->
+ "_";
+match_term([H|T]=L) ->
+ case is_intlist(L) of
+ true ->
+ io_lib:format("~p", [L]);
+ false ->
+ ["[",match_term(H),"|",match_term(T),"]"]
+ end;
+match_term(Tuple) when is_tuple(Tuple) ->
+ ["{",match_term_tuple(Tuple, 1),"}"];
+match_term(Other) ->
+ io_lib:format("~p", [Other]).
+
+match_term_tuple(T, I) when I =< tuple_size(T) ->
+ [match_term(element(I, T)),
+ if I < tuple_size(T) -> ",";
+ true -> "" end|match_term_tuple(T, I+1)];
+match_term_tuple(_, _) ->
+ [].
+
+is_intlist(L) ->
+ lists:all(fun is_integer/1, L).
diff --git a/lib/asn1/test/testValueTest.erl b/lib/asn1/test/testValueTest.erl
new file mode 100644
index 0000000000..8a8e973621
--- /dev/null
+++ b/lib/asn1/test/testValueTest.erl
@@ -0,0 +1,114 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2014. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%%
+-module(testValueTest).
+
+-export([main/0]).
+
+main() ->
+ M = 'ValueTest',
+
+ %% Basic types
+ 12 = M:'vANY'(),
+ true = M:'vBOOLEAN'(),
+ 12 = M:'vINTEGER'(),
+ 0 = M:'vINTEGERNNL'(),
+ button1 = M:'vENUMERATED'(),
+ [zero,two] = M:'vBS'(),
+ 'NULL' = M:'vNULL'(),
+ <<16#31,16#32,16#33>> = M:'vOS'(),
+
+ %% OID
+ {2,1,1} = M:'vOD'(),
+ {1,2} = M:'integer-first'(),
+ {2,4,5} = M:'rel-oid-1'(),
+ {0,2,4,5} = M:'include-roid'(),
+ {1,2,1} = M:'include-oid'(),
+ {1,2,1,2,4,5,42} = M:'include-all'(),
+
+ %% Character strings
+ "01234567" = M:'numericstring'(),
+ "PrintableString" = M:'printablestring'(),
+ "VisibleString" = M:'visiblestring'(),
+ [0,13] = M:'cr'(),
+ ["First line",[0,13],"Second line"] = M:'ia5string1'(),
+ [[5,5],[4,4],[6,6]] = M:'ia5string2'(),
+ "TeletexString" = M:'teletexstring'(),
+ "VideotexString" = M:'videotexstring'(),
+ "97100211-0500" = M:'utctime'(),
+ "19971002103130.5" = M:'generalizedtime'(),
+ "ObjectDescriptor" = M:'objectdescriptor'(),
+ "GraphicString" = M:'graphicstring'(),
+ "GeneralString" = M:'generalstring'(),
+ "BMPString" = M:'bmpstring1'(),
+ [0,0,0,65] = M:'latinCapitalLetterA'(),
+ [0,0,3,145] = M:'greekCapitalLetterSigma'(),
+ ["This is a capital A: ",
+ [0,0,0,65],
+ ", and a capital sigma: ",
+ [0,0,3,145],
+ "; try and spot the difference!"] = M:'my-universalstring'(),
+
+ %% Integers
+ 42 = M:someInteger(),
+ 42 = M:otherInteger(),
+ {'IntegerSeq',42} = M:integerSeq1(),
+
+ %% Value from object
+ 2 = M:'int-from-object-1'(),
+ 4 = M:'int-from-object-2'(),
+ roundtrip_error('II', 1),
+ roundtrip('II', 2),
+ roundtrip('II', 3),
+ roundtrip('II', 4),
+ roundtrip_error('II', 5),
+
+ %% Recursive value definitions.
+ {'OctetStringSeq',<<16#40,16#41,16#42>>} = M:octetStringSeq1(),
+ <<16#40,16#41,16#42>> = M:otherOctetString(),
+ <<16#40,16#41,16#42>> = M:someOctetString(),
+ {'OctetStringSeq',<<16#40,16#41,16#42>>} = M:octetStringSeq2(),
+ {'OctetStringSeq',<<16#40,16#41,16#FF>>} = M:octetStringSeq3(),
+ <<16#40,16#41,16#FF>> = M:'os-1'(),
+ <<16#40,16#41,16#FF>> = M:'os-2'(),
+
+ %% Recursive BIT STRING definitions.
+ {'BsSeq',<<2#101101:6>>,[c]} = M:bsSeq1(),
+ {'BsSeq',<<2#101101:6>>,[c]} = M:bsSeq2(),
+ {'BsSeq',<<2#101:3>>,[a,c]} = M:bsSeq3(),
+ <<2#101101:6>> = M:someBitString(),
+ <<2#101101:6>> = M:otherBitString(),
+ <<2#101:3>> = M:bsFromObject(),
+ <<2#101:3>> = M:bsFromObjectInd(),
+ [c] = M:someNamedBs(),
+ [c] = M:someOtherNamedBs(),
+
+ ok.
+
+
+roundtrip(T, V) ->
+ asn1_test_lib:roundtrip('ValueTest', T, V).
+
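+%% The encode/decode roundtrip is expected to fail; any exception
+%% counts as success.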
+roundtrip_error(T, V) ->
+ try asn1_test_lib:roundtrip('ValueTest', T, V) of
+ ok ->
+ test_server:fail()
+ catch _:_ ->
+ ok
+ end.
diff --git a/lib/asn1/test/testX420.erl b/lib/asn1/test/testX420.erl
deleted file mode 100644
index 4ddc55dc16..0000000000
--- a/lib/asn1/test/testX420.erl
+++ /dev/null
@@ -1,93 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2008-2013. All Rights Reserved.
-%%
-%% The contents of this file are subject to the Erlang Public License,
-%% Version 1.1, (the "License"); you may not use this file except in
-%% compliance with the License. You should have received a copy of the
-%% Erlang Public License along with this software. If not, it can be
-%% retrieved online at http://www.erlang.org/.
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and limitations
-%% under the License.
-%%
-%% %CopyrightEnd%
-%%
-%%
-%%-------------------------------------------------------------------
-
--module(testX420).
-
--export([compile/3, ticket7759/2]).
-
--include_lib("test_server/include/test_server.hrl").
-
-
-compile(Erule, Options, Config) ->
- Specs0 = specs(),
- 99 = length(Specs0),
- CaseDir = ?config(case_dir, Config),
- Specs = [filename:join(x420, Spec) || Spec <- Specs0],
- asn1_test_lib:compile_all(Specs, Config, [Erule,{i,CaseDir}|Options]).
-
-specs() ->
- ["ACSE-1", "AuthenticationFramework", "BasicAccessControl",
- "CertificateExtensions", "Character-Coding-Attributes",
- "Character-Presentation-Attributes", "Character-Profile-Attributes",
- "Colour-Attributes", "DOR-definition", "DSAOperationalAttributeTypes",
- "Default-Value-Lists", "DirectoryAbstractService",
- "DirectoryAccessProtocol", "DirectoryInformationShadowProtocol",
- "DirectoryOperationalBindingManagementProtocol",
- "DirectoryOperationalBindingTypes", "DirectoryProtectionMappings",
- "DirectoryShadowAbstractService", "DirectorySystemProtocol",
- "DistributedOperations", "Document-Profile-Descriptor",
- "EnhancedSecurity", "External-References", "GULSProtectionMappings",
- "GenericProtectingTransferSyntax", "Geo-Gr-Coding-Attributes",
- "Geo-Gr-Presentation-Attributes", "Geo-Gr-Profile-Attributes",
- "GulsSecurityExchanges", "GulsSecurityTransformations",
- "HierarchicalOperationalBindings", "IPMSAbstractService",
- "IPMSAutoActionTypes", "IPMSExtendedBodyPartTypes",
- "IPMSExtendedBodyPartTypes2", "IPMSExtendedVoiceBodyPartType",
- "IPMSFileTransferBodyPartType", "IPMSForwardedContentBodyPartType",
- "IPMSForwardedReportBodyPartType", "IPMSFunctionalObjects",
- "IPMSHeadingExtensions", "IPMSInformationObjects",
- "IPMSMessageStoreAttributes", "IPMSObjectIdentifiers",
- "IPMSObjectIdentifiers2", "IPMSSecurityExtensions", "IPMSUpperBounds",
- "ISO-STANDARD-9541-FONT-ATTRIBUTE-SET", "ISO8571-FTAM", "ISO9541-SN",
- "Identifiers-and-Expressions", "InformationFramework",
- "Interchange-Data-Elements", "Layout-Descriptors", "Link-Descriptors",
- "Location-Expressions", "Logical-Descriptors", "MHSObjectIdentifiers",
- "MHSProtocolObjectIdentifiers", "MSAbstractService",
- "MSAccessProtocol", "MSGeneralAttributeTypes",
- "MSGeneralAutoActionTypes", "MSMatchingRules", "MSObjectIdentifiers",
- "MSUpperBounds", "MTAAbstractService", "MTSAbstractService",
- "MTSAbstractService88", "MTSAccessProtocol", "MTSObjectIdentifiers",
- "MTSUpperBounds", "Notation", "ObjectIdentifiers",
- "OperationalBindingManagement", "PKCS7", "PKCS7BodyPartType",
- "Protected-Part-Descriptors", "ProtocolObjectIdentifiers",
- "Raster-Gr-Coding-Attributes", "Raster-Gr-Presentation-Attributes",
- "Raster-Gr-Profile-Attributes", "Reliable-Transfer-APDU",
- "Remote-Operations-Abstract-Syntaxes",
- "Remote-Operations-Generic-ROS-PDUs",
- "Remote-Operations-Information-Objects-extensions",
- "Remote-Operations-Information-Objects",
- "Remote-Operations-Realizations",
- "Remote-Operations-Useful-Definitions", "SelectedAttributeTypes",
- "SeseAPDUs", "SpkmGssTokens", "Style-Descriptors", "Subprofiles",
- "Temporal-Relationships", "Text-Units", "UpperBounds",
- "UsefulDefinitions", "Videotex-Coding-Attributes"].
-
-ticket7759(_Erule,_Config) ->
- Encoded = encoded_msg(),
- io:format("Testing ticket7759 ...~n",[]),
- {ok, ContentInfo} = 'PKCS7':decode('ContentInfo',Encoded),
- {'ContentInfo',_Id,PKCS7_content} = ContentInfo,
- {ok,_} = 'PKCS7':decode('SignedData',PKCS7_content),
- ok.
-
-
-encoded_msg() ->
- <<48,128,6,9,42,134,72,134,247,13,1,7,2,160,128,48,128,2,1,1,49,11,48,9,6,5,43,14,3,2,26,5,0,48,128,6,9,42,134,72,134,247,13,1,7,1,160,128,36,128,0,0,0,0,0,0, 49,130,1,192,48,130,1,188,2,1,1,48,50,48,38,49,17,48,15,6,3,85,4,3,12,8,65,100,109,105,110,67,65,49,49,17,48,15,6,3,85,4,10,12,8,69,82,73,67,83,83,79,78,2,8,15,151,245,186,21,23,240,96,48,9,6,5,43,14,3,2,26,5,0,160,129,229,48,17,6,10,96,134,72,1,134,248,69,1,9,2,49,3,19,1,51,48,17,6,10,96,134,72,1,134,248,69,1,9,3,49,3,19,1,51,48,24,6,9,42,134,72,134,247,13,1,9,3,49,11,6,9,42,134,72,134,247,13,1,7,1,48,28,6,9,42,134,72,134,247,13,1,9,5,49,15,23,13,48,56,49,50,49,48,48,57,53,52,50,51,90,48,28,6,10,96,134,72,1,134,248,69,1,9,7,49,14,19,12,49,53,50,56,49,52,50,52,48,57,53,53,48,32,6,10,96,134,72,1,134,248,69,1,9,5,49,18,4,16,165,115,177,71,78,88,239,113,78,56,98,98,18,202,217,235,48,32,6,10,96,134,72,1,134,248,69,1,9,6,49,18,4,16,227,174,230,251,43,153,252,65,11,93,231,83,34,18,55,46,48,35,6,9,42,134,72,134,247,13,1,9,4,49,22,4,20,218,57,163,238,94,107,75,13,50,85,191,239,149,96,24,144,175,216,7,9,48,13,6,9,42,134,72,134,247,13,1,1,1,5,0,4,129,128,106,233,116,125,140,51,133,173,63,41,54,138,214,211,89,215,169,125,98,77,16,222,216,240,211,79,125,111,87,186,73,63,253,204,107,102,177,63,174,197,224,212,231,172,149,246,33,68,223,67,102,93,64,152,152,5,216,102,247,134,36,197,150,236,57,77,56,138,95,71,204,31,23,149,241,213,78,172,165,249,100,187,12,45,19,57,67,120,54,63,15,239,41,217,127,61,254,60,201,104,68,3,135,214,206,93,253,255,192,94,56,107,68,210,57,61,41,249,47,156,130,244,52,12,163,216,236,69,0,0,0,0,0,0>>.
diff --git a/lib/asn1/test/test_compile_options.erl b/lib/asn1/test/test_compile_options.erl
index 7f358e863c..4b6357a395 100644
--- a/lib/asn1/test/test_compile_options.erl
+++ b/lib/asn1/test/test_compile_options.erl
@@ -24,7 +24,7 @@
-export([wrong_path/1,comp/2,path/1,ticket_6143/1,noobj/1,
- record_name_prefix/1,verbose/1,warnings_as_errors/1]).
+ record_name_prefix/1,verbose/1]).
%% OTP-5689
wrong_path(Config) ->
@@ -132,43 +132,6 @@ verbose(Config) when is_list(Config) ->
?line [] = test_server:capture_get(),
ok.
-warnings_as_errors(Config) when is_list(Config) ->
- PrivDir = ?config(priv_dir,Config),
- Asn1File = filename:join([PrivDir,"WERROR.asn1"]),
- OutFile = filename:join([PrivDir,"WERROR.erl"]),
- Opts = [{outdir,PrivDir},noobj,verbose],
-
- %% Generate WERR.asn to emit warning
- %% Warning: Wrong format of type/value
- %% false/{'Externalvaluereference',_,'WERR',noInvokeId}
- Warn = <<"WERROR DEFINITIONS IMPLICIT TAGS ::=\n"
- "\n"
- "BEGIN\n"
- "\n"
- "InvokeId ::= CHOICE\n"
- "{\n"
- " present INTEGER,\n"
- " absent NULL\n"
- "}\n"
- "\n"
- "noInvokeId InvokeId ::= absent:NULL\n"
- "\n"
- "NoInvokeId InvokeId ::= {noInvokeId}\n"
- "\n"
- "END -- end of useful definitions.\n">>,
- ?line ok = file:write_file(Asn1File, Warn),
-
- %% Test warnings_as_errors compile
- ?line false = filelib:is_regular(OutFile),
- ?line {error, _} = asn1ct:compile(Asn1File, [warnings_as_errors|Opts]),
- ?line false = filelib:is_regular(OutFile),
-
- %% Test normal compile
- ?line ok = asn1ct:compile(Asn1File, Opts),
- ?line true = filelib:is_regular(OutFile),
- ?line ok = file:delete(OutFile),
- ok.
-
outfiles_check(OutDir) ->
outfiles_check(OutDir,outfiles1()).
diff --git a/lib/common_test/priv/Makefile.in b/lib/common_test/priv/Makefile.in
index 5a9fabbe45..1bc6b82ebb 100644
--- a/lib/common_test/priv/Makefile.in
+++ b/lib/common_test/priv/Makefile.in
@@ -71,7 +71,7 @@ debug opt:
$(V_at)sed -e 's;@CT_VSN@;$(VSN);' \
-e 's;@TS_VSN@;$(TEST_SERVER_VSN);' \
../install.sh.in > install.sh
- $(V_at)chmod 775 install.sh
+ - $(V_at)chmod -f 775 install.sh
docs:
diff --git a/lib/common_test/src/ct_cover.erl b/lib/common_test/src/ct_cover.erl
index c7f446dee9..b630a51835 100644
--- a/lib/common_test/src/ct_cover.erl
+++ b/lib/common_test/src/ct_cover.erl
@@ -174,7 +174,7 @@ get_spec_test(File) ->
[] -> [#cover{app=none, level=details}];
_ -> Res
end,
- case get_cover_opts(Apps, Terms, []) of
+ case get_cover_opts(Apps, Terms, Dir, []) of
E = {error,_} ->
E;
[CoverSpec] ->
@@ -205,124 +205,125 @@ collect_apps([], Apps) ->
%% get_cover_opts(Terms) -> AppCoverInfo
%% AppCoverInfo: [#cover{app=App,...}]
-get_cover_opts([App | Apps], Terms, CoverInfo) ->
- case get_app_info(App, Terms) of
+get_cover_opts([App | Apps], Terms, Dir, CoverInfo) ->
+ case get_app_info(App, Terms, Dir) of
E = {error,_} -> E;
AppInfo ->
AppInfo1 = files2mods(AppInfo),
- get_cover_opts(Apps, Terms, [AppInfo1|CoverInfo])
+ get_cover_opts(Apps, Terms, Dir, [AppInfo1|CoverInfo])
end;
-get_cover_opts([], _, CoverInfo) ->
+get_cover_opts([], _, _, CoverInfo) ->
lists:reverse(CoverInfo).
-%% get_app_info(App, Terms) -> App1
+%% get_app_info(App, Terms, Dir) -> App1
-get_app_info(App=#cover{app=none}, [{incl_dirs,Dirs}|Terms]) ->
- get_app_info(App, [{incl_dirs,none,Dirs}|Terms]);
-get_app_info(App=#cover{app=Name}, [{incl_dirs,Name,Dirs}|Terms]) ->
- case get_files(Dirs, ".beam", false, []) of
+get_app_info(App=#cover{app=none}, [{incl_dirs,Dirs}|Terms], Dir) ->
+ get_app_info(App, [{incl_dirs,none,Dirs}|Terms], Dir);
+get_app_info(App=#cover{app=Name}, [{incl_dirs,Name,Dirs}|Terms], Dir) ->
+ case get_files(Dirs, Dir, ".beam", false, []) of
E = {error,_} -> E;
Mods1 ->
Mods = App#cover.incl_mods,
- get_app_info(App#cover{incl_mods=Mods++Mods1},Terms)
+ get_app_info(App#cover{incl_mods=Mods++Mods1},Terms,Dir)
end;
-get_app_info(App=#cover{app=none}, [{incl_dirs_r,Dirs}|Terms]) ->
- get_app_info(App, [{incl_dirs_r,none,Dirs}|Terms]);
-get_app_info(App=#cover{app=Name}, [{incl_dirs_r,Name,Dirs}|Terms]) ->
- case get_files(Dirs, ".beam", true, []) of
+get_app_info(App=#cover{app=none}, [{incl_dirs_r,Dirs}|Terms], Dir) ->
+ get_app_info(App, [{incl_dirs_r,none,Dirs}|Terms], Dir);
+get_app_info(App=#cover{app=Name}, [{incl_dirs_r,Name,Dirs}|Terms], Dir) ->
+ case get_files(Dirs, Dir, ".beam", true, []) of
E = {error,_} -> E;
Mods1 ->
Mods = App#cover.incl_mods,
- get_app_info(App#cover{incl_mods=Mods++Mods1},Terms)
+ get_app_info(App#cover{incl_mods=Mods++Mods1},Terms,Dir)
end;
-get_app_info(App=#cover{app=none}, [{incl_mods,Mods1}|Terms]) ->
- get_app_info(App, [{incl_mods,none,Mods1}|Terms]);
-get_app_info(App=#cover{app=Name}, [{incl_mods,Name,Mods1}|Terms]) ->
+get_app_info(App=#cover{app=none}, [{incl_mods,Mods1}|Terms], Dir) ->
+ get_app_info(App, [{incl_mods,none,Mods1}|Terms], Dir);
+get_app_info(App=#cover{app=Name}, [{incl_mods,Name,Mods1}|Terms], Dir) ->
Mods = App#cover.incl_mods,
- get_app_info(App#cover{incl_mods=Mods++Mods1},Terms);
+ get_app_info(App#cover{incl_mods=Mods++Mods1},Terms,Dir);
-get_app_info(App=#cover{app=none}, [{excl_dirs,Dirs}|Terms]) ->
- get_app_info(App, [{excl_dirs,none,Dirs}|Terms]);
-get_app_info(App=#cover{app=Name}, [{excl_dirs,Name,Dirs}|Terms]) ->
- case get_files(Dirs, ".beam", false, []) of
+get_app_info(App=#cover{app=none}, [{excl_dirs,Dirs}|Terms], Dir) ->
+ get_app_info(App, [{excl_dirs,none,Dirs}|Terms], Dir);
+get_app_info(App=#cover{app=Name}, [{excl_dirs,Name,Dirs}|Terms], Dir) ->
+ case get_files(Dirs, Dir, ".beam", false, []) of
E = {error,_} -> E;
Mods1 ->
Mods = App#cover.excl_mods,
- get_app_info(App#cover{excl_mods=Mods++Mods1},Terms)
+ get_app_info(App#cover{excl_mods=Mods++Mods1},Terms,Dir)
end;
-get_app_info(App=#cover{app=none}, [{excl_dirs_r,Dirs}|Terms]) ->
- get_app_info(App, [{excl_dirs_r,none,Dirs}|Terms]);
-get_app_info(App=#cover{app=Name}, [{excl_dirs_r,Name,Dirs}|Terms]) ->
- case get_files(Dirs, ".beam", true, []) of
+get_app_info(App=#cover{app=none}, [{excl_dirs_r,Dirs}|Terms],Dir) ->
+ get_app_info(App, [{excl_dirs_r,none,Dirs}|Terms],Dir);
+get_app_info(App=#cover{app=Name}, [{excl_dirs_r,Name,Dirs}|Terms],Dir) ->
+ case get_files(Dirs, Dir, ".beam", true, []) of
E = {error,_} -> E;
Mods1 ->
Mods = App#cover.excl_mods,
- get_app_info(App#cover{excl_mods=Mods++Mods1},Terms)
+ get_app_info(App#cover{excl_mods=Mods++Mods1},Terms,Dir)
end;
-get_app_info(App=#cover{app=none}, [{excl_mods,Mods1}|Terms]) ->
- get_app_info(App, [{excl_mods,none,Mods1}|Terms]);
-get_app_info(App=#cover{app=Name}, [{excl_mods,Name,Mods1}|Terms]) ->
+get_app_info(App=#cover{app=none}, [{excl_mods,Mods1}|Terms], Dir) ->
+ get_app_info(App, [{excl_mods,none,Mods1}|Terms], Dir);
+get_app_info(App=#cover{app=Name}, [{excl_mods,Name,Mods1}|Terms], Dir) ->
Mods = App#cover.excl_mods,
- get_app_info(App#cover{excl_mods=Mods++Mods1},Terms);
+ get_app_info(App#cover{excl_mods=Mods++Mods1},Terms,Dir);
-get_app_info(App=#cover{app=none}, [{cross,Cross}|Terms]) ->
- get_app_info(App, [{cross,none,Cross}|Terms]);
-get_app_info(App=#cover{app=Name}, [{cross,Name,Cross1}|Terms]) ->
+get_app_info(App=#cover{app=none}, [{cross,Cross}|Terms], Dir) ->
+ get_app_info(App, [{cross,none,Cross}|Terms], Dir);
+get_app_info(App=#cover{app=Name}, [{cross,Name,Cross1}|Terms], Dir) ->
Cross = App#cover.cross,
- get_app_info(App#cover{cross=Cross++Cross1},Terms);
+ get_app_info(App#cover{cross=Cross++Cross1},Terms,Dir);
-get_app_info(App=#cover{app=none}, [{src_dirs,Dirs}|Terms]) ->
- get_app_info(App, [{src_dirs,none,Dirs}|Terms]);
-get_app_info(App=#cover{app=Name}, [{src_dirs,Name,Dirs}|Terms]) ->
- case get_files(Dirs, ".erl", false, []) of
+get_app_info(App=#cover{app=none}, [{src_dirs,Dirs}|Terms], Dir) ->
+ get_app_info(App, [{src_dirs,none,Dirs}|Terms], Dir);
+get_app_info(App=#cover{app=Name}, [{src_dirs,Name,Dirs}|Terms], Dir) ->
+ case get_files(Dirs, Dir, ".erl", false, []) of
E = {error,_} -> E;
Src1 ->
Src = App#cover.src,
- get_app_info(App#cover{src=Src++Src1},Terms)
+ get_app_info(App#cover{src=Src++Src1},Terms,Dir)
end;
-get_app_info(App=#cover{app=none}, [{src_dirs_r,Dirs}|Terms]) ->
- get_app_info(App, [{src_dirs_r,none,Dirs}|Terms]);
-get_app_info(App=#cover{app=Name}, [{src_dirs_r,Name,Dirs}|Terms]) ->
- case get_files(Dirs, ".erl", true, []) of
+get_app_info(App=#cover{app=none}, [{src_dirs_r,Dirs}|Terms], Dir) ->
+ get_app_info(App, [{src_dirs_r,none,Dirs}|Terms], Dir);
+get_app_info(App=#cover{app=Name}, [{src_dirs_r,Name,Dirs}|Terms], Dir) ->
+ case get_files(Dirs, Dir, ".erl", true, []) of
E = {error,_} -> E;
Src1 ->
Src = App#cover.src,
- get_app_info(App#cover{src=Src++Src1},Terms)
+ get_app_info(App#cover{src=Src++Src1},Terms,Dir)
end;
-get_app_info(App=#cover{app=none}, [{src_files,Src1}|Terms]) ->
- get_app_info(App, [{src_files,none,Src1}|Terms]);
-get_app_info(App=#cover{app=Name}, [{src_files,Name,Src1}|Terms]) ->
+get_app_info(App=#cover{app=none}, [{src_files,Src1}|Terms], Dir) ->
+ get_app_info(App, [{src_files,none,Src1}|Terms], Dir);
+get_app_info(App=#cover{app=Name}, [{src_files,Name,Src1}|Terms], Dir) ->
Src = App#cover.src,
- get_app_info(App#cover{src=Src++Src1},Terms);
+ get_app_info(App#cover{src=Src++Src1},Terms,Dir);
-get_app_info(App, [_|Terms]) ->
- get_app_info(App, Terms);
+get_app_info(App, [_|Terms], Dir) ->
+ get_app_info(App, Terms, Dir);
-get_app_info(App, []) ->
+get_app_info(App, [], _) ->
App.
%% get_files(...)
-get_files([Dir|Dirs], Ext, Recurse, Files) ->
- case file:list_dir(Dir) of
+get_files([Dir|Dirs], RootDir, Ext, Recurse, Files) ->
+ DirAbs = filename:absname(Dir, RootDir),
+ case file:list_dir(DirAbs) of
{ok,Entries} ->
- {SubDirs,Matches} = analyse_files(Entries, Dir, Ext, [], []),
+ {SubDirs,Matches} = analyse_files(Entries, DirAbs, Ext, [], []),
if Recurse == false ->
- get_files(Dirs, Ext, Recurse, Files++Matches);
+ get_files(Dirs, RootDir, Ext, Recurse, Files++Matches);
true ->
- Files1 = get_files(SubDirs, Ext, Recurse, Files++Matches),
- get_files(Dirs, Ext, Recurse, Files1)
+ Files1 = get_files(SubDirs, RootDir, Ext, Recurse, Files++Matches),
+ get_files(Dirs, RootDir, Ext, Recurse, Files1)
end;
{error,Reason} ->
- {error,{Reason,Dir}}
+ {error,{Reason,DirAbs}}
end;
-get_files([], _Ext, _R, Files) ->
+get_files([], _RootDir, _Ext, _R, Files) ->
Files.
%% analyse_files(...)
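With this change, get_files/5 resolves every directory listed under incl_dirs, excl_dirs and their _r variants with filename:absname(Dir, RootDir), so relative paths in a cover specification no longer depend on the working directory of the test run. A minimal sketch, assuming RootDir is the directory of the cover spec file (the Dir bound earlier in get_spec_test/1, not shown in this hunk) and "myapp" is a hypothetical application:

    %% cover.spec (hypothetical), stored in /home/tester/specs
    {incl_dirs, ["../myapp/ebin"]}.

    %% get_files/5 now effectively evaluates
    filename:absname("../myapp/ebin", "/home/tester/specs").
    %% => "/home/tester/specs/../myapp/ebin", which is then listed
    %%    with file:list_dir/1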
diff --git a/lib/common_test/src/ct_framework.erl b/lib/common_test/src/ct_framework.erl
index e8ea7992b4..ec525784ec 100644
--- a/lib/common_test/src/ct_framework.erl
+++ b/lib/common_test/src/ct_framework.erl
@@ -1268,6 +1268,11 @@ report(What,Data) ->
Data1 = if GrName == undefined -> {Suite,Func,Result};
true -> Data
end,
+ %% Register the group leader for the process calling the report
+ %% function, making it possible for a hook function to print
+ %% in the test case log file
+ ReportingPid = self(),
+ ct_logs:register_groupleader(ReportingPid, group_leader()),
case Result of
{failed, _} ->
ct_hooks:on_tc_fail(What, Data1);
@@ -1282,6 +1287,7 @@ report(What,Data) ->
_Else ->
ok
end,
+ ct_logs:unregister_groupleader(ReportingPid),
case {Func,Result} of
{init_per_suite,_} ->
ok;
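Because ct_framework:report/2 now registers its group leader with ct_logs around the on_tc_fail/on_tc_skip calls, output produced by a hook during those callbacks ends up in the test case log. A minimal sketch of a hypothetical hook module relying on this (the module name and log text are illustrative only):

    -module(my_logging_cth).
    -export([init/2, on_tc_fail/3]).

    init(_Id, Opts) ->
        {ok, Opts}.

    %% With the group leader registered by ct_framework:report/2, this
    %% ct:log/2 call is written to the failing test case's log file.
    on_tc_fail(TC, Reason, State) ->
        ct:log("~w failed: ~p", [TC, Reason]),
        State.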
diff --git a/lib/common_test/src/ct_logs.erl b/lib/common_test/src/ct_logs.erl
index 7037cdca73..23332ad268 100644
--- a/lib/common_test/src/ct_logs.erl
+++ b/lib/common_test/src/ct_logs.erl
@@ -29,6 +29,7 @@
-module(ct_logs).
-export([init/2, close/2, init_tc/1, end_tc/1]).
+-export([register_groupleader/2, unregister_groupleader/1]).
-export([get_log_dir/0, get_log_dir/1]).
-export([log/3, start_log/1, cont_log/2, end_log/0]).
-export([set_stylesheet/2, clear_stylesheet/1]).
@@ -267,7 +268,7 @@ init_tc(RefreshLog) ->
ok.
%%%-----------------------------------------------------------------
-%%% @spec end_tc(TCPid) -> ok | {error,Reason}
+%%% @spec end_tc(TCPid) -> ok
%%%
%%% @doc Test case clean up (tool-internal use only).
%%%
@@ -278,6 +279,26 @@ end_tc(TCPid) ->
call({end_tc,TCPid}).
%%%-----------------------------------------------------------------
+%%% @spec register_groupleader(Pid,GroupLeader) -> ok
+%%%
+%%% @doc To enable logging to a group leader (tool-internal use only).
+%%%
+%%% <p>This function is called by ct_framework:report/2</p>
+register_groupleader(Pid,GroupLeader) ->
+ call({register_groupleader,Pid,GroupLeader}),
+ ok.
+
+%%%-----------------------------------------------------------------
+%%% @spec unregister_groupleader(Pid) -> ok
+%%%
+%%% @doc To disable logging to a group leader (tool-internal use only).
+%%%
+%%% <p>This function is called by ct_framework:report/2</p>
+unregister_groupleader(Pid) ->
+ call({unregister_groupleader,Pid}),
+ ok.
+
+%%%-----------------------------------------------------------------
%%% @spec log(Heading,Format,Args) -> ok
%%%
%%% @doc Log internal activity (tool-internal use only).
@@ -764,6 +785,14 @@ logger_loop(State) ->
return(From,ok),
logger_loop(State#logger_state{tc_groupleaders =
rm_tc_gl(TCPid,State)});
+ {{register_groupleader,Pid,GL},From} ->
+ GLs = add_tc_gl(Pid,GL,State),
+ return(From,ok),
+ logger_loop(State#logger_state{tc_groupleaders = GLs});
+ {{unregister_groupleader,Pid},From} ->
+ return(From,ok),
+ logger_loop(State#logger_state{tc_groupleaders =
+ rm_tc_gl(Pid,State)});
{{get_log_dir,true},From} ->
return(From,{ok,State#logger_state.log_dir}),
logger_loop(State);
diff --git a/lib/common_test/src/ct_release_test.erl b/lib/common_test/src/ct_release_test.erl
index eb9e9c832f..3f0b5bda67 100644
--- a/lib/common_test/src/ct_release_test.erl
+++ b/lib/common_test/src/ct_release_test.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2014. All Rights Reserved.
+%% Copyright Ericsson AB 2014-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -51,10 +51,11 @@
%% executed.
%%
%% <dl>
-%% <dt>Module:upgrade_init(State) -> NewState</dt>
+%% <dt>Module:upgrade_init(CtData,State) -> NewState</dt>
%% <dd>Types:
%%
-%% <b><c>State = NewState = cb_state()</c></b>
+%% <b><code>CtData = {@link ct_data()}</code></b><br/>
+%% <b><code>State = NewState = cb_state()</code></b>
%%
%% Initialize the system before the upgrade test starts.
%%
@@ -63,17 +64,22 @@
%% the boot script, so this callback is intended for additional
%% initialization, if necessary.
%%
+%% <code>CtData</code> is an opaque data structure which shall be used
+%% in any call to <code>ct_release_test</code> inside the callback.
+%%
%% Example:
%%
%% ```
-%% upgrade_init(State) ->
+%% upgrade_init(CtData,State) ->
+%% {ok,{FromVsn,ToVsn}} = ct_release_test:get_app_vsns(CtData,myapp),
%% open_connection(State).'''
%% </dd>
%%
-%% <dt>Module:upgrade_upgraded(State) -> NewState</dt>
+%% <dt>Module:upgrade_upgraded(CtData,State) -> NewState</dt>
%% <dd>Types:
%%
-%% <b><c>State = NewState = cb_state()</c></b>
+%% <b><code>CtData = {@link ct_data()}</code></b><br/>
+%% <b><code>State = NewState = cb_state()</code></b>
%%
%% Check that upgrade was successful.
%%
@@ -82,17 +88,21 @@
%% been made permanent. It allows application specific checks to
%% ensure that the upgrade was successful.
%%
+%% <code>CtData</code> is an opaque data structure which shall be used
+%% in any call to <code>ct_release_test</code> inside the callback.
+%%
%% Example:
%%
%% ```
-%% upgrade_upgraded(State) ->
+%% upgrade_upgraded(CtData,State) ->
%% check_connection_still_open(State).'''
%% </dd>
%%
-%% <dt>Module:upgrade_downgraded(State) -> NewState</dt>
+%% <dt>Module:upgrade_downgraded(CtData,State) -> NewState</dt>
%% <dd>Types:
%%
-%% <b><c>State = NewState = cb_state()</c></b>
+%% <b><code>CtData = {@link ct_data()}</code></b><br/>
+%% <b><code>State = NewState = cb_state()</code></b>
%%
%% Check that downgrade was successful.
%%
@@ -101,10 +111,13 @@
%% made permanent. It allows application specific checks to ensure
%% that the downgrade was successful.
%%
+%% <code>CtData</code> is an opaque data structure which shall be used
+%% in any call to <code>ct_release_test</code> inside the callback.
+%%
%% Example:
%%
%% ```
-%% upgrade_init(State) ->
+%% upgrade_downgraded(CtData,State) ->
%% check_connection_closed(State).'''
%% </dd>
%% </dl>
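A minimal sketch of a callback module using the new two-argument callbacks; the module name, the choice of stdlib, and the use of a property list as state are assumptions for illustration:

    -module(my_upgrade_cb).
    -export([upgrade_init/2, upgrade_upgraded/2, upgrade_downgraded/2]).

    upgrade_init(CtData, State) ->
        %% CtData is the opaque handle to pass back into ct_release_test.
        {ok, {FromVsn, ToVsn}} = ct_release_test:get_app_vsns(CtData, stdlib),
        [{stdlib_vsns, {FromVsn, ToVsn}} | State].

    upgrade_upgraded(_CtData, State) ->
        State.

    upgrade_downgraded(_CtData, State) ->
        State.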
@@ -112,7 +125,7 @@
%%-----------------------------------------------------------------
-module(ct_release_test).
--export([init/1, upgrade/4, cleanup/1]).
+-export([init/1, upgrade/4, cleanup/1, get_app_vsns/2, get_appup/2]).
-include_lib("kernel/include/file.hrl").
@@ -121,12 +134,17 @@
-define(exclude_apps, [hipe, typer, dialyzer]). % never include these apps
%%-----------------------------------------------------------------
+-record(ct_data, {from,to}).
+
+%%-----------------------------------------------------------------
-type config() :: [{atom(),term()}].
-type cb_state() :: term().
+-opaque ct_data() :: #ct_data{}.
+-export_type([ct_data/0]).
--callback upgrade_init(cb_state()) -> cb_state().
--callback upgrade_upgraded(cb_state()) -> cb_state().
--callback upgrade_downgraded(cb_state()) -> cb_state().
+-callback upgrade_init(ct_data(),cb_state()) -> cb_state().
+-callback upgrade_upgraded(ct_data(),cb_state()) -> cb_state().
+-callback upgrade_downgraded(ct_data(),cb_state()) -> cb_state().
%%-----------------------------------------------------------------
-spec init(Config) -> Result when
@@ -207,12 +225,12 @@ init(Config) ->
%% <li>Perform the upgrade test and allow customized
%% control by using callbacks:
%% <ol>
-%% <li>Callback: `upgrade_init/1'</li>
+%% <li>Callback: `upgrade_init/2'</li>
%% <li>Unpack the new release</li>
%% <li>Install the new release</li>
-%% <li>Callback: `upgrade_upgraded/1'</li>
+%% <li>Callback: `upgrade_upgraded/2'</li>
%% <li>Install the original release</li>
-%% <li>Callback: `upgrade_downgraded/1'</li>
+%% <li>Callback: `upgrade_downgraded/2'</li>
%% </ol>
%% </li>
%% </ol>
@@ -314,6 +332,71 @@ cleanup(Config) ->
Config.
%%-----------------------------------------------------------------
+-spec get_app_vsns(CtData,App) -> {ok,{From,To}} | {error,Reason} when
+ CtData :: ct_data(),
+ App :: atom(),
+ From :: string(),
+ To :: string(),
+ Reason :: {app_not_found,App}.
+%% @doc Get versions involved in this upgrade for the given application.
+%%
+%% This function can be called from inside any of the callback
+%% functions. It returns the old (From) and new (To) versions involved
+%% in the upgrade/downgrade test for the given application.
+%%
+%% <code>CtData</code> must be the first argument received in the
+%% calling callback function - an opaque data structure set by
+%% <code>ct_release_test</code>.
+get_app_vsns(#ct_data{from=FromApps,to=ToApps},App) ->
+ case {lists:keyfind(App,1,FromApps),lists:keyfind(App,1,ToApps)} of
+ {{App,FromVsn,_},{App,ToVsn,_}} ->
+ {ok,{FromVsn,ToVsn}};
+ _ ->
+ {error,{app_not_found,App}}
+ end.
+
+%%-----------------------------------------------------------------
+-spec get_appup(CtData,App) -> {ok,Appup} | {error,Reason} when
+ CtData :: ct_data(),
+ App :: atom(),
+ Appup :: {From,To,Up,Down},
+ From :: string(),
+ To :: string(),
+ Up :: [Instr],
+ Down :: [Instr],
+ Instr :: term(),
+ Reason :: {app_not_found,App} | {vsn_not_found,{App,From}}.
+%% @doc Get appup instructions for the given application.
+%%
+%% This function can be called from inside any of the callback
+%% functions. It reads the appup file for the given application and
+%% returns the instructions for upgrade and downgrade for the versions
+%% in the test.
+%%
+%% <code>CtData</code> must be the first argument received in the
+%% calling callback function - an opaque data structure set by
+%% <code>ct_release_test</code>.
+%%
+%% See the reference manual for appup files for the type definitions of the
+%% instructions.
+get_appup(#ct_data{from=FromApps,to=ToApps},App) ->
+ case lists:keyfind(App,1,ToApps) of
+ {App,ToVsn,ToDir} ->
+ Appup = filename:join([ToDir, "ebin", atom_to_list(App)++".appup"]),
+ {ok, [{ToVsn, Ups, Downs}]} = file:consult(Appup),
+ {App,FromVsn,_} = lists:keyfind(App,1,FromApps),
+ case {systools_relup:appup_search_for_version(FromVsn,Ups),
+ systools_relup:appup_search_for_version(FromVsn,Downs)} of
+ {{ok,Up},{ok,Down}} ->
+ {ok,{FromVsn,ToVsn,Up,Down}};
+ _ ->
+ {error,{vsn_not_found,{App,FromVsn}}}
+ end;
+ false ->
+ {error,{app_not_found,App}}
+ end.
+
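A minimal usage sketch for get_appup/2 from inside one of the callbacks; the application name and the expectation that its upgrade contains an 'update' instruction are hypothetical:

    upgrade_upgraded(CtData, State) ->
        {ok, {_FromVsn, _ToVsn, UpInstrs, _DownInstrs}} =
            ct_release_test:get_appup(CtData, myapp),
        %% e.g. expect at least one {update,Mod,...} step in the upgrade
        true = lists:keymember(update, 1, UpInstrs),
        State.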
+%%-----------------------------------------------------------------
init_upgrade_test() ->
%% Check that a real release is running, not e.g. cerl
ok = application:ensure_started(sasl),
@@ -558,8 +641,14 @@ do_upgrade({Cb,InitState},FromVsn,FromAppsVsns,ToRel,ToAppsVsns,InstallDir) ->
Start = filename:join([InstallDir,bin,start]),
{ok,Node} = start_node(Start,FromVsn,FromAppsVsns),
+ %% Add path to this module, to allow calls to get_appup/2
+ Dir = filename:dirname(code:which(?MODULE)),
+ _ = rpc:call(Node,code,add_pathz,[Dir]),
+
ct:log("Node started: ~p",[Node]),
- State1 = do_callback(Node,Cb,upgrade_init,InitState),
+ CtData = #ct_data{from = [{A,V,code:lib_dir(A)} || {A,V} <- FromAppsVsns],
+ to=[{A,V,code:lib_dir(A)} || {A,V} <- ToAppsVsns]},
+ State1 = do_callback(Node,Cb,upgrade_init,[CtData,InitState]),
[{"OTP upgrade test",FromVsn,_,permanent}] =
rpc:call(Node,release_handler,which_releases,[]),
@@ -592,7 +681,7 @@ do_upgrade({Cb,InitState},FromVsn,FromAppsVsns,ToRel,ToAppsVsns,InstallDir) ->
{"OTP upgrade test",FromVsn,_,old}] =
rpc:call(Node,release_handler,which_releases,[]),
- State2 = do_callback(Node,Cb,upgrade_upgraded,State1),
+ State2 = do_callback(Node,Cb,upgrade_upgraded,[CtData,State1]),
ct:log("Re-installing old release"),
case rpc:call(Node,release_handler,install_release,[FromVsn]) of
@@ -615,7 +704,7 @@ do_upgrade({Cb,InitState},FromVsn,FromAppsVsns,ToRel,ToAppsVsns,InstallDir) ->
{"OTP upgrade test",FromVsn,_,permanent}] =
rpc:call(Node,release_handler,which_releases,[]),
- _State3 = do_callback(Node,Cb,upgrade_downgraded,State2),
+ _State3 = do_callback(Node,Cb,upgrade_downgraded,[CtData,State2]),
ct:log("Terminating node ~p",[Node]),
erlang:monitor_node(Node,true),
@@ -625,15 +714,15 @@ do_upgrade({Cb,InitState},FromVsn,FromAppsVsns,ToRel,ToAppsVsns,InstallDir) ->
ok.
-do_callback(Node,Mod,Func,State) ->
+do_callback(Node,Mod,Func,Args) ->
Dir = filename:dirname(code:which(Mod)),
_ = rpc:call(Node,code,add_path,[Dir]),
ct:log("Calling ~p:~p/1",[Mod,Func]),
- R = rpc:call(Node,Mod,Func,[State]),
- ct:log("~p:~p/1 returned: ~p",[Mod,Func,R]),
+ R = rpc:call(Node,Mod,Func,Args),
+ ct:log("~p:~p/~w returned: ~p",[Mod,Func,length(Args),R]),
case R of
{badrpc,Error} ->
- test_server:fail({test_upgrade_callback,Mod,Func,State,Error});
+ test_server:fail({test_upgrade_callback,Mod,Func,Args,Error});
NewState ->
NewState
end.
diff --git a/lib/common_test/src/ct_run.erl b/lib/common_test/src/ct_run.erl
index 00d0aab507..4a12481214 100644
--- a/lib/common_test/src/ct_run.erl
+++ b/lib/common_test/src/ct_run.erl
@@ -293,10 +293,10 @@ script_start1(Parent, Args) ->
application:set_env(common_test, auto_compile, true),
InclDirs =
case proplists:get_value(include, Args) of
- Incl when is_list(hd(Incl)) ->
- Incl;
+ Incls when is_list(hd(Incls)) ->
+ [filename:absname(IDir) || IDir <- Incls];
Incl when is_list(Incl) ->
- [Incl];
+ [filename:absname(Incl)];
undefined ->
[]
end,
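The point of the change above is that -include directories given as relative paths are turned into absolute paths immediately, so later changes of working directory cannot invalidate them. filename:absname/1 resolves against the current working directory; for example (hypothetical path), if ct_run is started from /home/tester/myapp:

    filename:absname("include").
    %% => "/home/tester/myapp/include"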
@@ -774,7 +774,8 @@ script_usage() ->
"\n\t[-basic_html]\n\n"),
io:format("Run tests from command line:\n\n"
"\tct_run [-dir TestDir1 TestDir2 .. TestDirN] |"
- "\n\t[-suite Suite1 Suite2 .. SuiteN [-case Case1 Case2 .. CaseN]]"
+ "\n\t[[-dir TestDir] -suite Suite1 Suite2 .. SuiteN"
+ "\n\t [[-group Groups1 Groups2 .. GroupsN] [-case Case1 Case2 .. CaseN]]]"
"\n\t[-step [config | keep_inactive]]"
"\n\t[-config ConfigFile1 ConfigFile2 .. ConfigFileN]"
"\n\t[-userconfig CallbackModule ConfigFile1 .. ConfigFileN]"
@@ -1023,10 +1024,10 @@ run_test2(StartOpts) ->
case proplists:get_value(include, StartOpts) of
undefined ->
[];
- Incl when is_list(hd(Incl)) ->
- Incl;
+ Incls when is_list(hd(Incls)) ->
+ [filename:absname(IDir) || IDir <- Incls];
Incl when is_list(Incl) ->
- [Incl]
+ [filename:absname(Incl)]
end,
case os:getenv("CT_INCLUDE_PATH") of
false ->
@@ -1393,6 +1394,7 @@ run_testspec2(TestSpec) ->
EnvInclude++Opts#opts.include
end,
application:set_env(common_test, include, AllInclude),
+
LogDir1 = which(logdir,Opts#opts.logdir),
case check_and_install_configfiles(
Opts#opts.config, LogDir1, Opts) of
@@ -2134,6 +2136,14 @@ do_run_test(Tests, Skip, Opts0) ->
case check_and_add(Tests, [], []) of
{ok,AddedToPath} ->
ct_util:set_testdata({stats,{0,0,{0,0}}}),
+
+ %% test_server needs to know the include path too
+ InclPath = case application:get_env(common_test, include) of
+ {ok,Incls} -> Incls;
+ _ -> []
+ end,
+ application:set_env(test_server, include, InclPath),
+
test_server_ctrl:start_link(local),
%% let test_server expand the test tuples and count no of cases
diff --git a/lib/common_test/src/ct_telnet.erl b/lib/common_test/src/ct_telnet.erl
index babe73e575..4e03bf8630 100644
--- a/lib/common_test/src/ct_telnet.erl
+++ b/lib/common_test/src/ct_telnet.erl
@@ -1122,7 +1122,8 @@ teln_expect1(Name,Pid,Data,Pattern,Acc,EO=#eo{idle_timeout=IdleTO,
NotFinished ->
%% Get more data
Fun = fun() -> get_data1(EO#eo.teln_pid) end,
- case timer:tc(ct_gen_conn, do_within_time, [Fun, IdleTO]) of
+ BreakAfter = if TotalTO < IdleTO -> TotalTO; true -> IdleTO end,
+ case timer:tc(ct_gen_conn, do_within_time, [Fun, BreakAfter]) of
{_,{error,Reason}} ->
%% A timeout will occur when the telnet connection
%% is idle for EO#eo.idle_timeout milliseconds.
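With BreakAfter chosen as the smaller of total_timeout and idle_timeout, an expect call whose total_timeout is shorter than its idle_timeout now returns when the total time is up. A minimal sketch against an already opened connection (the pattern is illustrative):

    {error, timeout} =
        ct_telnet:expect(Handle, ["pattern-that-never-matches"],
                         [{idle_timeout, 5000}, {total_timeout, 2000}]),
    %% returns after roughly 2 seconds instead of waiting out idle_timeout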
diff --git a/lib/common_test/test/ct_cover_SUITE.erl b/lib/common_test/test/ct_cover_SUITE.erl
index 87ba4ae1b9..1dab425509 100644
--- a/lib/common_test/test/ct_cover_SUITE.erl
+++ b/lib/common_test/test/ct_cover_SUITE.erl
@@ -77,7 +77,11 @@ all() ->
ct_cover_add_remove_nodes,
otp_9956,
cross,
- export_import
+ export_import,
+ relative_incl_dirs,
+ absolute_incl_dirs,
+ relative_excl_dirs,
+ absolute_excl_dirs
].
%%--------------------------------------------------------------------
@@ -215,6 +219,45 @@ export_import(Config) ->
check_calls(Events2,2),
ok.
+relative_incl_dirs(Config) ->
+ false = check_cover(Config),
+ RelDir = rel_path(?config(priv_dir, Config), ?config(data_dir, Config)),
+ CoverSpec = [{incl_dirs, [RelDir]}],
+ CoverFile = create_cover_file(rel_incl_dirs, CoverSpec, Config),
+ Opts = [{cover, CoverFile}],
+ {ok, Events} = run_test(rel_incl_dirs, default, Opts, Config),
+ check_calls(Events, 1),
+ ok.
+
+absolute_incl_dirs(Config) ->
+ false = check_cover(Config),
+ CoverSpec = [{incl_dirs, [?config(data_dir, Config)]}],
+ CoverFile = create_cover_file(abs_incl_dirs, CoverSpec, Config),
+ Opts = [{cover, CoverFile}],
+ {ok, Events} = run_test(abs_incl_dirs, default, Opts, Config),
+ check_calls(Events, 1),
+ ok.
+
+relative_excl_dirs(Config) ->
+ false = check_cover(Config),
+ RelDir = rel_path(?config(priv_dir, Config), ?config(data_dir, Config)),
+ CoverSpec = default_cover_file_content() ++ [{excl_dirs, [RelDir]}],
+ CoverFile = create_cover_file(rel_excl_dirs, CoverSpec, Config),
+ Opts = [{cover, CoverFile}],
+ {ok, Events} = run_test(rel_excl_dirs, default_no_cover, Opts, Config),
+ check_no_cover_compiled(Events),
+ ok.
+
+absolute_excl_dirs(Config) ->
+ false = check_cover(Config),
+ AbsDir = ?config(data_dir, Config),
+ CoverSpec = default_cover_file_content() ++ [{excl_dirs, [AbsDir]}],
+ CoverFile = create_cover_file(abs_excl_dirs, CoverSpec, Config),
+ Opts = [{cover, CoverFile}],
+ {ok, Events} = run_test(abs_excl_dirs, default_no_cover, Opts, Config),
+ check_no_cover_compiled(Events),
+ ok.
+
%%%-----------------------------------------------------------------
%%% HELP FUNCTIONS
%%%-----------------------------------------------------------------
@@ -288,23 +331,36 @@ get_log_dirs(Events) ->
{ct_test_support_eh,
{event,start_logging,_Node,LogDir}} <- Events].
+%% Check if a module was compiled without cover
+check_no_cover_compiled(Events) ->
+ check_no_cover_compiled(Events, ?mod).
+check_no_cover_compiled(Events, Mod) ->
+ [ {error, {not_cover_compiled, Mod}} = analyse_log(CoverLog, Mod)
+ || CoverLog <- cover_logs(Events) ].
+
%% Check that each coverlog includes N calls to ?mod:foo/0
check_calls(Events,N) ->
check_calls(Events,{?mod,foo,0},N).
check_calls(Events,MFA,N) ->
- CoverLogs = [filename:join(D,"all.coverdata") || D <- get_log_dirs(Events)],
- do_check_logs(CoverLogs,MFA,N).
+ do_check_logs(cover_logs(Events),MFA,N).
do_check_logs([CoverLog|CoverLogs],{Mod,_,_} = MFA,N) ->
- {ok,_} = cover:start(),
- ok = cover:import(CoverLog),
- {ok,Calls} = cover:analyse(Mod,calls,function),
- ok = cover:stop(),
+ {ok, Calls} = analyse_log(CoverLog, Mod),
{MFA,N} = lists:keyfind(MFA,1,Calls),
do_check_logs(CoverLogs,MFA,N);
do_check_logs([],_,_) ->
ok.
+cover_logs(Events) ->
+ [filename:join(D,"all.coverdata") || D <- get_log_dirs(Events)].
+
+analyse_log(CoverLog, Mod) ->
+ {ok, _} = cover:start(),
+ ok = cover:import(CoverLog),
+ Result = cover:analyse(Mod, calls, function),
+ ok = cover:stop(),
+ Result.
+
fullname(Name) ->
{ok,Host} = inet:gethostname(),
list_to_atom(atom_to_list(Name) ++ "@" ++ Host).
@@ -333,3 +389,12 @@ start_slave(Name,Args) ->
{boot_timeout,10}, % extending some timers for slow test hosts
{init_timeout,10},
{startup_timeout,10}]).
+
+rel_path(From, To) ->
+ Segments = do_rel_path(filename:split(From), filename:split(To)),
+ filename:join(Segments).
+
+do_rel_path([Seg|RestA], [Seg|RestB]) ->
+ do_rel_path(RestA, RestB);
+do_rel_path(PathA, PathB) ->
+ lists:duplicate(length(PathA), "..") ++ PathB.
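A worked example of the helper above: rel_path("/ldisk/test/priv", "/ldisk/test/data_dir") splits both paths, drops the common prefix ["/", "ldisk", "test"], and joins ["..", "data_dir"] into "../data_dir".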
diff --git a/lib/common_test/test/ct_cover_SUITE_data/cover_SUITE.erl b/lib/common_test/test/ct_cover_SUITE_data/cover_SUITE.erl
index 83d368c53d..789e48bd96 100644
--- a/lib/common_test/test/ct_cover_SUITE_data/cover_SUITE.erl
+++ b/lib/common_test/test/ct_cover_SUITE_data/cover_SUITE.erl
@@ -71,6 +71,10 @@ default(_Config) ->
cover_test_mod:foo(),
ok.
+default_no_cover(_Config) ->
+ cover_test_mod:foo(),
+ ok.
+
slave(_Config) ->
cover_compiled = code:which(cover_test_mod),
cover_test_mod:foo(),
diff --git a/lib/common_test/test/ct_event_handler_SUITE.erl b/lib/common_test/test/ct_event_handler_SUITE.erl
index b534a7141d..30a5e650fe 100644
--- a/lib/common_test/test/ct_event_handler_SUITE.erl
+++ b/lib/common_test/test/ct_event_handler_SUITE.erl
@@ -156,18 +156,21 @@ results(Config) when is_list(Config) ->
TestEvents =
[{eh_A,start_logging,{'DEF','RUNDIR'}},
{eh_A,test_start,{'DEF',{'START_TIME','LOGDIR'}}},
- {eh_A,start_info,{1,1,3}},
+ {eh_A,start_info,{1,1,4}},
{eh_A,tc_start,{eh_11_SUITE,init_per_suite}},
{eh_A,tc_done,{eh_11_SUITE,init_per_suite,ok}},
{eh_A,tc_start,{eh_11_SUITE,tc1}},
{eh_A,tc_done,{eh_11_SUITE,tc1,ok}},
{eh_A,test_stats,{1,0,{0,0}}},
{eh_A,tc_start,{eh_11_SUITE,tc2}},
- {eh_A,tc_done,{eh_11_SUITE,tc2,{skipped,"Skipped"}}},
+ {eh_A,tc_done,{eh_11_SUITE,tc2,{skipped,"Skip"}}},
{eh_A,test_stats,{1,0,{1,0}}},
{eh_A,tc_start,{eh_11_SUITE,tc3}},
- {eh_A,tc_done,{eh_11_SUITE,tc3,{failed,{error,'Failing'}}}},
- {eh_A,test_stats,{1,1,{1,0}}},
+ {eh_A,tc_done,{eh_11_SUITE,tc3,{skipped,"Skipped"}}},
+ {eh_A,test_stats,{1,0,{2,0}}},
+ {eh_A,tc_start,{eh_11_SUITE,tc4}},
+ {eh_A,tc_done,{eh_11_SUITE,tc4,{failed,{error,'Failing'}}}},
+ {eh_A,test_stats,{1,1,{2,0}}},
{eh_A,tc_start,{eh_11_SUITE,end_per_suite}},
{eh_A,tc_done,{eh_11_SUITE,end_per_suite,ok}},
{eh_A,test_done,{'DEF','STOP_TIME'}},
diff --git a/lib/common_test/test/ct_event_handler_SUITE_data/event_handling_1/test/eh_11_SUITE.erl b/lib/common_test/test/ct_event_handler_SUITE_data/event_handling_1/test/eh_11_SUITE.erl
index 16b7129993..a52fe96f30 100644
--- a/lib/common_test/test/ct_event_handler_SUITE_data/event_handling_1/test/eh_11_SUITE.erl
+++ b/lib/common_test/test/ct_event_handler_SUITE_data/event_handling_1/test/eh_11_SUITE.erl
@@ -124,7 +124,7 @@ end_per_testcase(_TestCase, _Config) ->
%% Description: Returns the list of test cases that are to be executed.
%%--------------------------------------------------------------------
all() ->
- [tc1, tc2, tc3].
+ [tc1, tc2, tc3, tc4].
%%--------------------------------------------------------------------
@@ -135,7 +135,10 @@ tc1(_Config) ->
ok.
tc2(_Config) ->
- {skip,"Skipped"}.
+ {skip,"Skip"}.
-tc3(_Config) ->
+tc3(_Config) ->
+ {skipped,"Skipped"}.
+
+tc4(_Config) ->
exit('Failing').
diff --git a/lib/common_test/test/ct_hooks_SUITE.erl b/lib/common_test/test/ct_hooks_SUITE.erl
index c8fc4bd59b..d5ad8312e6 100644
--- a/lib/common_test/test/ct_hooks_SUITE.erl
+++ b/lib/common_test/test/ct_hooks_SUITE.erl
@@ -1075,7 +1075,37 @@ test_events(fail_n_skip_with_minimal_cth) ->
{?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}},
{?eh,cth,{'_',init,['_',[]]}},
{?eh,tc_start,{'_',init_per_suite}},
-
+
+ {parallel,
+ [{?eh,tc_start,{ct_cth_fail_one_skip_one_SUITE,{init_per_group,
+ group1,[parallel]}}},
+ {?eh,tc_done,{ct_cth_fail_one_skip_one_SUITE,{init_per_group,
+ group1,[parallel]},ok}},
+ {parallel,
+ [{?eh,tc_start,{ct_cth_fail_one_skip_one_SUITE,{init_per_group,
+ group2,[parallel]}}},
+ {?eh,tc_done,{ct_cth_fail_one_skip_one_SUITE,{init_per_group,
+ group2,[parallel]},ok}},
+ %% Verify that 'skip' as well as 'skipped' works
+ {?eh,tc_start,{ct_cth_fail_one_skip_one_SUITE,test_case2}},
+ {?eh,tc_done,{ct_cth_fail_one_skip_one_SUITE,test_case2,{skipped,"skip it"}}},
+ {?eh,tc_start,{ct_cth_fail_one_skip_one_SUITE,test_case3}},
+ {?eh,tc_done,{ct_cth_fail_one_skip_one_SUITE,test_case3,{skipped,"skip it"}}},
+ {?eh,cth,{empty_cth,on_tc_skip,[{test_case2,group2},
+ {tc_user_skip,{skipped,"skip it"}},
+ []]}},
+ {?eh,cth,{empty_cth,on_tc_skip,[{test_case3,group2},
+ {tc_user_skip,{skipped,"skip it"}},
+ []]}},
+ {?eh,tc_start,{ct_cth_fail_one_skip_one_SUITE,{end_per_group,
+ group2,[parallel]}}},
+ {?eh,tc_done,{ct_cth_fail_one_skip_one_SUITE,{end_per_group,group2,
+ [parallel]},ok}}]},
+ {?eh,tc_start,{ct_cth_fail_one_skip_one_SUITE,{end_per_group,
+ group1,[parallel]}}},
+ {?eh,tc_done,{ct_cth_fail_one_skip_one_SUITE,{end_per_group,
+ group1,[parallel]},ok}}]},
+
{?eh,tc_done,{'_',end_per_suite,ok}},
{?eh,cth,{'_',terminate,[[]]}},
{?eh,stop_logging,[]}
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_cth_fail_one_skip_one_SUITE.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_cth_fail_one_skip_one_SUITE.erl
index b2f22d8257..7b84c246ca 100644
--- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_cth_fail_one_skip_one_SUITE.erl
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_cth_fail_one_skip_one_SUITE.erl
@@ -41,6 +41,8 @@ end_per_group(_Group,_Config) ->
init_per_testcase(test_case2, Config) ->
{skip,"skip it"};
+init_per_testcase(test_case3, Config) ->
+ {skipped,"skip it"};
init_per_testcase(_TestCase, Config) ->
Config.
@@ -48,7 +50,9 @@ end_per_testcase(_TestCase, _Config) ->
ok.
groups() ->
- [{group1,[parallel],[{group2,[parallel],[test_case1,test_case2,test_case3]}]}].
+ [{group1,[parallel],
+ [{group2,[parallel],
+ [test_case1,test_case2,test_case3,test_case4]}]}].
all() ->
[{group,group1}].
@@ -62,3 +66,6 @@ test_case2(Config) ->
test_case3(Config) ->
ok.
+
+test_case4(Config) ->
+ ok.
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/empty_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/empty_cth.erl
index 6caac7e447..77783fccf5 100644
--- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/empty_cth.erl
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/empty_cth.erl
@@ -75,6 +75,7 @@
init(Id, Opts) ->
gen_event:notify(?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, init, [Id, Opts]}}),
+ ct:log("~w:init called", [?MODULE]),
{ok,Opts}.
%% @doc The ID is used to uniquely identify a CTH instance, if two CTHs
@@ -85,6 +86,7 @@ init(Id, Opts) ->
id(Opts) ->
gen_event:notify(?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, id, [Opts]}}),
+ ct:log("~w:id called", [?MODULE]),
now().
%% @doc Called before init_per_suite is called. Note that this callback is
@@ -100,6 +102,7 @@ pre_init_per_suite(Suite,Config,State) ->
?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, pre_init_per_suite,
[Suite,Config,State]}}),
+ ct:log("~w:pre_init_per_suite(~w) called", [?MODULE,Suite]),
{Config, State}.
%% @doc Called after init_per_suite.
@@ -114,6 +117,7 @@ post_init_per_suite(Suite,Config,Return,State) ->
?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, post_init_per_suite,
[Suite,Config,Return,State]}}),
+ ct:log("~w:post_init_per_suite(~w) called", [?MODULE,Suite]),
{Return, State}.
%% @doc Called before end_per_suite. The config/state can be changed here,
@@ -127,6 +131,7 @@ pre_end_per_suite(Suite,Config,State) ->
?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, pre_end_per_suite,
[Suite,Config,State]}}),
+ ct:log("~w:pre_end_per_suite(~w) called", [?MODULE,Suite]),
{Config, State}.
%% @doc Called after end_per_suite. Note that the config cannot be
@@ -141,6 +146,7 @@ post_end_per_suite(Suite,Config,Return,State) ->
?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, post_end_per_suite,
[Suite,Config,Return,State]}}),
+ ct:log("~w:post_end_per_suite(~w) called", [?MODULE,Suite]),
{Return, State}.
%% @doc Called before each init_per_group.
@@ -154,6 +160,7 @@ pre_init_per_group(Group,Config,State) ->
?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, pre_init_per_group,
[Group,Config,State]}}),
+ ct:log("~w:pre_init_per_group(~w) called", [?MODULE,Group]),
{Config, State}.
%% @doc Called after each init_per_group.
@@ -168,6 +175,7 @@ post_init_per_group(Group,Config,Return,State) ->
?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, post_init_per_group,
[Group,Config,Return,State]}}),
+ ct:log("~w:post_init_per_group(~w) called", [?MODULE,Group]),
{Return, State}.
%% @doc Called after each end_per_group. The config/state can be changed here,
@@ -181,6 +189,7 @@ pre_end_per_group(Group,Config,State) ->
?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, pre_end_per_group,
[Group,Config,State]}}),
+ ct:log("~w:pre_end_per_group(~w) called", [?MODULE,Group]),
{Config, State}.
%% @doc Called after each end_per_group. Note that the config cannot be
@@ -195,6 +204,7 @@ post_end_per_group(Group,Config,Return,State) ->
?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, post_end_per_group,
[Group,Config,Return,State]}}),
+ ct:log("~w:post_end_per_group(~w) called", [?MODULE,Group]),
{Return, State}.
%% @doc Called before each test case.
@@ -208,6 +218,7 @@ pre_init_per_testcase(TC,Config,State) ->
?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, pre_init_per_testcase,
[TC,Config,State]}}),
+ ct:log("~w:pre_init_per_testcase(~w) called", [?MODULE,TC]),
{Config, State}.
%% @doc Called after each test case. Note that the config cannot be
@@ -222,6 +233,7 @@ post_end_per_testcase(TC,Config,Return,State) ->
?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, post_end_per_testcase,
[TC,Config,Return,State]}}),
+ ct:log("~w:post_end_per_testcase(~w) called", [?MODULE,TC]),
{Return, State}.
%% @doc Called after post_init_per_suite, post_end_per_suite, post_init_per_group,
@@ -237,6 +249,7 @@ on_tc_fail(TC, Reason, State) ->
?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, on_tc_fail,
[TC,Reason,State]}}),
+ ct:log("~w:on_tc_fail(~w) called", [?MODULE,TC]),
State.
%% @doc Called when a test case is skipped by either user action
@@ -253,6 +266,7 @@ on_tc_skip(TC, Reason, State) ->
?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, on_tc_skip,
[TC,Reason,State]}}),
+ ct:log("~w:on_tc_skip(~w) called", [?MODULE,TC]),
State.
%% @doc Called when the scope of the CTH is done, this depends on
@@ -274,4 +288,5 @@ terminate(State) ->
gen_event:notify(
?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, terminate, [State]}}),
+ ct:log("~w:terminate called", [?MODULE]),
ok.
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/minimal_terminate_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/minimal_terminate_cth.erl
index 30721a6b3a..436470f46d 100644
--- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/minimal_terminate_cth.erl
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/minimal_terminate_cth.erl
@@ -28,10 +28,14 @@
%% CT Hooks
-export([init/2]).
-export([terminate/1]).
+-export([on_tc_skip/3]).
init(Id, Opts) ->
empty_cth:init(Id, Opts).
+on_tc_skip(TC, Reason, State) ->
+ empty_cth:on_tc_skip(TC,Reason,State).
+
terminate(State) ->
empty_cth:terminate(State).
diff --git a/lib/common_test/test/ct_telnet_SUITE_data/ct_telnet_own_server_SUITE.erl b/lib/common_test/test/ct_telnet_SUITE_data/ct_telnet_own_server_SUITE.erl
index 0ddb4e9b00..bd5d76266a 100644
--- a/lib/common_test/test/ct_telnet_SUITE_data/ct_telnet_own_server_SUITE.erl
+++ b/lib/common_test/test/ct_telnet_SUITE_data/ct_telnet_own_server_SUITE.erl
@@ -44,6 +44,7 @@ all() ->
expect_error_timeout1,
expect_error_timeout2,
expect_error_timeout3,
+ total_timeout_less_than_idle,
no_prompt_check,
no_prompt_check_repeat,
no_prompt_check_sequence,
@@ -134,9 +135,32 @@ expect_error_timeout2(_) ->
expect_error_timeout3(_) ->
{ok, Handle} = ct_telnet:open(telnet_server_conn1),
ok = ct_telnet:send(Handle, "echo_loop 5000 xxx"),
+
+ T0 = now(),
{error,timeout} = ct_telnet:expect(Handle, ["yyy"],
[{idle_timeout,infinity},
- {total_timeout,3000}]),
+ {total_timeout,2001}]),
+ Diff = trunc(timer:now_diff(now(),T0)/1000),
+ {_,true} = {Diff, (Diff >= 2000) and (Diff =< 4000)},
+
+ ok = ct_telnet:send(Handle, "echo ayt"),
+ {ok,["ayt"]} = ct_telnet:expect(Handle, ["ayt"]),
+ ok = ct_telnet:close(Handle),
+ ok.
+
+%% OTP-12335: If total_timeout < idle_timeout, expect will never time out
+%% until after idle_timeout, which is incorrect.
+total_timeout_less_than_idle(_) ->
+ {ok, Handle} = ct_telnet:open(telnet_server_conn1),
+ ok = ct_telnet:send(Handle, "echo_no_prompt xxx"),
+
+ T0 = now(),
+ {error,timeout} = ct_telnet:expect(Handle, ["yyy"],
+ [{idle_timeout,5000},
+ {total_timeout,2001}]),
+ Diff = trunc(timer:now_diff(now(),T0)/1000),
+ {_,true} = {Diff, (Diff >= 2000) and (Diff =< 4000)},
+
ok = ct_telnet:send(Handle, "echo ayt"),
{ok,["ayt"]} = ct_telnet:expect(Handle, ["ayt"]),
ok = ct_telnet:close(Handle),
diff --git a/lib/compiler/src/Makefile b/lib/compiler/src/Makefile
index c6d09d85eb..2032392821 100644
--- a/lib/compiler/src/Makefile
+++ b/lib/compiler/src/Makefile
@@ -81,6 +81,7 @@ MODULES = \
rec_env \
sys_core_dsetel \
sys_core_fold \
+ sys_core_fold_lists \
sys_core_inline \
sys_pre_attributes \
sys_pre_expand \
@@ -187,6 +188,7 @@ $(EBIN)/core_parse.beam: core_parse.hrl $(EGEN)/core_parse.erl
$(EBIN)/core_pp.beam: core_parse.hrl
$(EBIN)/sys_core_dsetel.beam: core_parse.hrl
$(EBIN)/sys_core_fold.beam: core_parse.hrl
+$(EBIN)/sys_core_fold_lists.beam: core_parse.hrl
$(EBIN)/sys_core_inline.beam: core_parse.hrl
$(EBIN)/sys_pre_expand.beam: ../../stdlib/include/erl_bits.hrl
$(EBIN)/v3_codegen.beam: v3_life.hrl
diff --git a/lib/compiler/src/beam_a.erl b/lib/compiler/src/beam_a.erl
index fe4f473846..dd7e03dd28 100644
--- a/lib/compiler/src/beam_a.erl
+++ b/lib/compiler/src/beam_a.erl
@@ -54,6 +54,9 @@ rename_instrs([{call_only,A,F}|Is]) ->
[{call,A,F},return|rename_instrs(Is)];
rename_instrs([{call_ext_only,A,F}|Is]) ->
[{call_ext,A,F},return|rename_instrs(Is)];
+rename_instrs([{'%live',_}|Is]) ->
+ %% When compiling from old .S files.
+ rename_instrs(Is);
rename_instrs([I|Is]) ->
[rename_instr(I)|rename_instrs(Is)];
rename_instrs([]) -> [].
diff --git a/lib/compiler/src/beam_block.erl b/lib/compiler/src/beam_block.erl
index 5626aa34ab..92f09e400c 100644
--- a/lib/compiler/src/beam_block.erl
+++ b/lib/compiler/src/beam_block.erl
@@ -155,7 +155,7 @@ collect(remove_message) -> {set,[],[],remove_message};
collect({put_map,F,Op,S,D,R,{list,Puts}}) ->
{set,[D],[S|Puts],{alloc,R,{put_map,Op,F}}};
collect({get_map_elements,F,S,{list,Gets}}) ->
- {Ss,Ds} = beam_utils:spliteven(Gets),
+ {Ss,Ds} = beam_utils:split_even(Gets),
{set,Ds,[S|Ss],{get_map_elements,F}};
collect({'catch',R,L}) -> {set,[R],[],{'catch',L}};
collect(fclearerror) -> {set,[],[],fclearerror};
@@ -184,7 +184,7 @@ embed_lines([], Acc) -> Acc.
opt_blocks([{block,Bl0}|Is]) ->
%% The live annotation at the beginning is not useful.
- [{'%live',_}|Bl] = Bl0,
+ [{'%live',_,_}|Bl] = Bl0,
[{block,opt_block(Bl)}|opt_blocks(Is)];
opt_blocks([I|Is]) ->
[I|opt_blocks(Is)];
@@ -269,7 +269,7 @@ opt([{set,_,_,{line,_}}=Line1,
opt([{set,Ds0,Ss,Op}|Is0]) ->
{Ds,Is} = opt_moves(Ds0, Is0),
[{set,Ds,Ss,Op}|opt(Is)];
-opt([{'%live',_}=I|Is]) ->
+opt([{'%live',_,_}=I|Is]) ->
[I|opt(Is)];
opt([]) -> [].
diff --git a/lib/compiler/src/beam_bool.erl b/lib/compiler/src/beam_bool.erl
index 5a4621dc37..a452d30b61 100644
--- a/lib/compiler/src/beam_bool.erl
+++ b/lib/compiler/src/beam_bool.erl
@@ -126,44 +126,53 @@ bopt_block(Reg, Fail, OldIs, [{block,Bl0}|Acc0], St0) ->
%% There was a reference to a boolean expression
%% from inside a protected block (try/catch), to
%% a boolean expression outside.
- throw:protected_barrier ->
+ throw:protected_barrier ->
failed;
- %% The 'xor' operator was used. We currently don't
- %% find it worthwile to translate 'xor' operators
- %% (the code would be clumsy).
- throw:'xor' ->
+ %% The 'xor' operator was used. We currently don't
+ %% find it worthwhile to translate 'xor' operators
+ %% (the code would be clumsy).
+ throw:'xor' ->
failed;
- %% The block does not contain a boolean expression,
- %% but only a call to a guard BIF.
- %% For instance: ... when element(1, T) ->
- throw:not_boolean_expr ->
+ %% The block does not contain a boolean expression,
+ %% but only a call to a guard BIF.
+ %% For instance: ... when element(1, T) ->
+ throw:not_boolean_expr ->
failed;
- %% The block contains a 'move' instruction that could
- %% not be handled.
- throw:move ->
+ %% The block contains a 'move' instruction that could
+ %% not be handled.
+ throw:move ->
failed;
- %% The optimization is not safe. (A register
- %% used by the instructions following the
- %% optimized code is either not assigned a
- %% value at all or assigned a different value.)
- throw:all_registers_not_killed ->
+ %% The optimization is not safe. (A register
+ %% used by the instructions following the
+ %% optimized code is either not assigned a
+ %% value at all or assigned a different value.)
+ throw:all_registers_not_killed ->
failed;
- throw:registers_used ->
+ throw:registers_used ->
failed;
- %% A protected block refered to the value
- %% returned by another protected block,
- %% probably because the Core Erlang code
- %% used nested try/catches in the guard.
- %% (v3_core never produces nested try/catches
- %% in guards, so it must have been another
- %% Core Erlang translator.)
- throw:protected_violation ->
+ %% A protected block referred to the value
+ %% returned by another protected block,
+ %% probably because the Core Erlang code
+ %% used nested try/catches in the guard.
+ %% (v3_core never produces nested try/catches
+ %% in guards, so it must have been another
+ %% Core Erlang translator.)
+ throw:protected_violation ->
+ failed;
+
+ %% Failed to work out the live registers for a GC
+ %% BIF. For example, if the number of live registers
+ %% needed to be 4 because {x,3} was a source register,
+ %% but {x,2} was not known to be initialized, this
+ %% exception would be thrown.
+ throw:gc_bif_alloc_failure ->
failed
+
end
end.
@@ -665,10 +674,16 @@ put_reg_1(V, [], I) -> [{I,V}].
fetch_reg(V, [{I,V}|_]) -> {x,I};
fetch_reg(V, [_|SRs]) -> fetch_reg(V, SRs).
-live_regs(Regs) ->
- foldl(fun ({I,_}, _) ->
- I
- end, -1, Regs)+1.
+live_regs([{_,reserved}|_]) ->
+ %% We are not sure that this register is initialized, so we must
+ %% abort the optimization.
+ throw(gc_bif_alloc_failure);
+live_regs([{I,_}]) ->
+ I+1;
+live_regs([{_,_}|Regs]) ->
+ live_regs(Regs);
+live_regs([]) ->
+ 0.
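A worked example for the rewritten live_regs/1: the register list is kept in increasing register order, so the last entry decides the live count, and any 'reserved' slot aborts the optimization (the variable names are placeholders):

    live_regs([{0,x}, {1,y}, {2,z}])        %% => 3
    live_regs([{0,x}, {1,reserved}, {2,z}]) %% => throws gc_bif_alloc_failure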
%%%
diff --git a/lib/compiler/src/beam_clean.erl b/lib/compiler/src/beam_clean.erl
index b653998252..b68b8702e0 100644
--- a/lib/compiler/src/beam_clean.erl
+++ b/lib/compiler/src/beam_clean.erl
@@ -234,31 +234,6 @@ replace([{bs_init,{f,Lbl},Info,Live,Ss,Dst}|Is], Acc, D) when Lbl =/= 0 ->
replace(Is, [{bs_init,{f,label(Lbl, D)},Info,Live,Ss,Dst}|Acc], D);
replace([{bs_put,{f,Lbl},Info,Ss}|Is], Acc, D) when Lbl =/= 0 ->
replace(Is, [{bs_put,{f,label(Lbl, D)},Info,Ss}|Acc], D);
-replace([{bs_init2,{f,Lbl},Sz,Words,R,F,Dst}|Is], Acc, D) when Lbl =/= 0 ->
- replace(Is, [{bs_init2,{f,label(Lbl, D)},Sz,Words,R,F,Dst}|Acc], D);
-replace([{bs_init_bits,{f,Lbl},Sz,Words,R,F,Dst}|Is], Acc, D) when Lbl =/= 0 ->
- replace(Is, [{bs_init_bits,{f,label(Lbl, D)},Sz,Words,R,F,Dst}|Acc], D);
-replace([{bs_put_integer,{f,Lbl},Bits,Unit,Fl,Val}|Is], Acc, D) when Lbl =/= 0 ->
- replace(Is, [{bs_put_integer,{f,label(Lbl, D)},Bits,Unit,Fl,Val}|Acc], D);
-replace([{bs_put_utf8=I,{f,Lbl},Fl,Val}|Is], Acc, D) when Lbl =/= 0 ->
- replace(Is, [{I,{f,label(Lbl, D)},Fl,Val}|Acc], D);
-replace([{bs_put_utf16=I,{f,Lbl},Fl,Val}|Is], Acc, D) when Lbl =/= 0 ->
- replace(Is, [{I,{f,label(Lbl, D)},Fl,Val}|Acc], D);
-replace([{bs_put_utf32=I,{f,Lbl},Fl,Val}|Is], Acc, D) when Lbl =/= 0 ->
- replace(Is, [{I,{f,label(Lbl, D)},Fl,Val}|Acc], D);
-replace([{bs_put_binary,{f,Lbl},Bits,Unit,Fl,Val}|Is], Acc, D) when Lbl =/= 0 ->
- replace(Is, [{bs_put_binary,{f,label(Lbl, D)},Bits,Unit,Fl,Val}|Acc], D);
-replace([{bs_put_float,{f,Lbl},Bits,Unit,Fl,Val}|Is], Acc, D) when Lbl =/= 0 ->
- replace(Is, [{bs_put_float,{f,label(Lbl, D)},Bits,Unit,Fl,Val}|Acc], D);
-replace([{bs_add,{f,Lbl},Src,Dst}|Is], Acc, D) when Lbl =/= 0 ->
- replace(Is, [{bs_add,{f,label(Lbl, D)},Src,Dst}|Acc], D);
-replace([{bs_append,{f,Lbl},_,_,_,_,_,_,_}=I0|Is], Acc, D) when Lbl =/= 0 ->
- I = setelement(2, I0, {f,label(Lbl, D)}),
- replace(Is, [I|Acc], D);
-replace([{bs_utf8_size=I,{f,Lbl},Src,Dst}|Is], Acc, D) when Lbl =/= 0 ->
- replace(Is, [{I,{f,label(Lbl, D)},Src,Dst}|Acc], D);
-replace([{bs_utf16_size=I,{f,Lbl},Src,Dst}|Is], Acc, D) when Lbl =/= 0 ->
- replace(Is, [{I,{f,label(Lbl, D)},Src,Dst}|Acc], D);
replace([{put_map=I,{f,Lbl},Op,Src,Dst,Live,List}|Is], Acc, D)
when Lbl =/= 0 ->
replace(Is, [{I,{f,label(Lbl, D)},Op,Src,Dst,Live,List}|Acc], D);
diff --git a/lib/compiler/src/beam_dead.erl b/lib/compiler/src/beam_dead.erl
index b15adfa889..7cd07dc3be 100644
--- a/lib/compiler/src/beam_dead.erl
+++ b/lib/compiler/src/beam_dead.erl
@@ -21,112 +21,10 @@
-export([module/2]).
-%%% The following optimisations are done:
-%%%
-%%% (1) In this code
-%%%
-%%% move DeadValue {x,0}
-%%% jump L2
-%%% .
-%%% .
-%%% .
-%%% L2: move Anything {x,0}
-%%% .
-%%% .
-%%% .
-%%%
-%%% the first assignment to {x,0} has no effect (is dead),
-%%% so it can be removed. Besides removing a move instruction,
-%%% if the move was preceeded by a label, the resulting code
-%%% will look this
-%%%
-%%% L1: jump L2
-%%% .
-%%% .
-%%% .
-%%% L2: move Anything {x,0}
-%%% .
-%%% .
-%%% .
-%%%
-%%% which can be further optimized by the jump optimizer (beam_jump).
-%%%
-%%% (2) In this code
-%%%
-%%% L1: move AtomLiteral {x,0}
-%%% jump L2
-%%% .
-%%% .
-%%% .
-%%% L2: test is_atom FailLabel {x,0}
-%%% select_val {x,0}, FailLabel [... AtomLiteral => L3...]
-%%% .
-%%% .
-%%% .
-%%% L3: ...
-%%%
-%%% FailLabel: ...
-%%%
-%%% the first code fragment can be changed to
-%%%
-%%% L1: move AtomLiteral {x,0}
-%%% jump L3
-%%%
-%%% If the literal is not included in the table of literals in the
-%%% select_val instruction, the first code fragment will instead be
-%%% rewritten as:
-%%%
-%%% L1: move AtomLiteral {x,0}
-%%% jump FailLabel
-%%%
-%%% The move instruction will be removed by optimization (1) above,
-%%% if the code following the L3 label overwrites {x,0}.
-%%%
-%%% The code following the L2 label will be kept, but it will be removed later
-%%% by the jump optimizer.
-%%%
-%%% (3) In this code
-%%%
-%%% test is_eq_exact ALabel Src Dst
-%%% move Src Dst
-%%%
-%%% the move instruction can be removed.
-%%% Same thing for
-%%%
-%%% test is_nil ALabel Dst
-%%% move [] Dst
-%%%
-%%%
-%%% (4) In this code
-%%%
-%%% select_val {x,Reg}, ALabel [... Literal => L1...]
-%%% .
-%%% .
-%%% .
-%%% L1: move Literal {x,Reg}
-%%%
-%%% we can remove the move instruction.
-%%%
-%%% (5) In the following code
-%%%
-%%% bif '=:=' Fail Src1 Src2 {x,0}
-%%% jump L1
-%%% .
-%%% .
-%%% .
-%%% L1: select_val {x,0}, ALabel [... true => L2..., ...false => L3...]
-%%% .
-%%% .
-%%% .
-%%% L2: .... L3: ....
-%%%
-%%% the first two instructions can be replaced with
-%%%
-%%% test is_eq_exact L3 Src1 Src2
-%%% jump L2
-%%%
-%%% provided that {x,0} is killed at both L2 and L3.
-%%%
+%%% Dead code is code that is executed but has no effect. This
+%%% optimization pass either removes dead code or jumps around it,
+%%% potentially making it unreachable and a target for removal by
+%%% the beam_jump pass.
-import(lists, [mapfoldl/3,reverse/1]).
@@ -173,7 +71,28 @@ move_move_into_block([I|Is], Acc) ->
move_move_into_block([], Acc) -> reverse(Acc).
%%%
-%%% Scan instructions in execution order and remove dead code.
+%%% Scan instructions in execution order and remove redundant 'move'
+%%% instructions. 'move' instructions are redundant if we know that
+%%% the register already contains the value being assigned, as in the
+%%% following code:
+%%%
+%%% test is_eq_exact SomeLabel Src Dst
+%%% move Src Dst
+%%%
+%%% or in:
+%%%
+%%% test is_nil SomeLabel Dst
+%%% move nil Dst
+%%%
+%%% or in:
+%%%
+%%% select_val Register FailLabel [... Literal => L1...]
+%%% .
+%%% .
+%%% .
+%%% L1: move Literal Register
+%%%
+%%% Also add extra labels to help the second backward pass.
%%%
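A hedged Erlang-level illustration of the second pattern above (an is_nil test followed by a move of nil): source of roughly this shape can produce it, although the exact BEAM output depends on the compiler version, so this is only a sketch:

    %% Returning [] in the branch where X is already known to be []
    %% is what can make a following 'move nil Dst' redundant.
    strip_empty(X) ->
        case X of
            [] -> [];
            _  -> X
        end.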
forward(Is, Lc) ->
@@ -215,15 +134,13 @@ forward([{test,is_eq_exact,_,[Dst,Src]}=I,{move,Src,Dst}|Is], D, Lc, Acc) ->
forward([I|Is], D, Lc, Acc);
forward([{test,is_nil,_,[Dst]}=I,{move,nil,Dst}|Is], D, Lc, Acc) ->
forward([I|Is], D, Lc, Acc);
-forward([{test,is_eq_exact,_,_}=I|Is], D, Lc, Acc) ->
- case Is of
- [{label,_}|_] -> forward(Is, D, Lc, [I|Acc]);
- _ -> forward(Is, D, Lc+1, [{label,Lc},I|Acc])
- end;
-forward([{test,is_ne_exact,_,_}=I|Is], D, Lc, Acc) ->
- case Is of
- [{label,_}|_] -> forward(Is, D, Lc, [I|Acc]);
- _ -> forward(Is, D, Lc+1, [{label,Lc},I|Acc])
+forward([{test,_,_,_}=I|Is]=Is0, D, Lc, Acc) ->
+ %% Help the second, backward pass by inserting labels after
+ %% relational operators so that they can be skipped if they are
+ %% known to be true.
+ case useful_to_insert_label(Is0) of
+ false -> forward(Is, D, Lc, [I|Acc]);
+ true -> forward(Is, D, Lc+1, [{label,Lc},I|Acc])
end;
forward([I|Is], D, Lc, Acc) ->
forward(Is, D, Lc, [I|Acc]);
@@ -239,9 +156,49 @@ update_value_dict([Lit,{f,Lbl}|T], Reg, D0) ->
update_value_dict(T, Reg, D);
update_value_dict([], _, D) -> D.
+useful_to_insert_label([_,{label,_}|_]) ->
+ false;
+useful_to_insert_label([{test,Op,_,_}|_]) ->
+ case Op of
+ is_lt -> true;
+ is_ge -> true;
+ is_eq_exact -> true;
+ is_ne_exact -> true;
+ _ -> false
+ end.
+
+%%%
+%%% Scan instructions in reverse execution order and try to
+%%% shortcut branch instructions.
+%%%
+%%% For example, in this code:
+%%%
+%%% move Literal Register
+%%% jump L1
+%%% .
+%%% .
+%%% .
+%%% L1: test is_{integer,atom} FailLabel Register
+%%% select_val {x,0} FailLabel [... Literal => L2...]
+%%% .
+%%% .
+%%% .
+%%% L2: ...
%%%
-%%% Scan instructions in reverse execution order and remove dead code.
+%%% the 'select_val' instruction will always transfer control to L2,
+%%% so we can just as well jump to L2 directly by rewriting the
+%%% first part of the sequence like this:
%%%
+%%% move Literal Register
+%%% jump L2
+%%%
+%%% If register Register is killed at label L2, we can remove the
+%%% 'move' instruction, leaving just the 'jump' instruction:
+%%%
+%%% jump L2
+%%%
+%%% These transformations may leave parts of the code unreachable.
+%%% The beam_jump pass will remove the unreachable code.
backward(Is, D) ->
backward(Is, D, []).
@@ -277,15 +234,10 @@ backward([{select,select_val,Reg,{f,Fail0},List0}|Is], D, Acc) ->
Fail = shortcut_bs_test(Fail1, Is, D),
Sel = {select,select_val,Reg,{f,Fail},List},
backward(Is, D, [Sel|Acc]);
-backward([{jump,{f,To0}},{move,Src,Reg}=Move0|Is], D, Acc) ->
- {To,Move} = case Src of
- {atom,Val0} ->
- To1 = shortcut_select_label(To0, Reg, Val0, D),
- {To2,Val} = shortcut_boolean_label(To1, Reg, Val0, D),
- {To2,{move,{atom,Val},Reg}};
- _ ->
- {shortcut_label(To0, D),Move0}
- end,
+backward([{jump,{f,To0}},{move,Src0,Reg}|Is], D, Acc) ->
+ To1 = shortcut_select_label(To0, Reg, Src0, D),
+ {To,Src} = shortcut_boolean_label(To1, Reg, Src0, D),
+ Move = {move,Src,Reg},
Jump = {jump,{f,To}},
case beam_utils:is_killed_at(Reg, To, D) of
false -> backward([Move|Is], D, [Jump|Acc]);
@@ -301,28 +253,25 @@ backward([{test,bs_start_match2,{f,To0},Live,[Src|_]=Info,Dst}|Is], D, Acc) ->
To = shortcut_bs_start_match(To0, Src, D),
I = {test,bs_start_match2,{f,To},Live,Info,Dst},
backward(Is, D, [I|Acc]);
-backward([{test,is_eq_exact,{f,To0},[Reg,{atom,Val}]=Ops}|Is], D, Acc) ->
- To1 = shortcut_bs_test(To0, Is, D),
- To = shortcut_fail_label(To1, Reg, Val, D),
- I = combine_eqs(To, Ops, D, Acc),
- backward(Is, D, [I|Acc]);
backward([{test,Op,{f,To0},Ops0}|Is], D, Acc) ->
To1 = shortcut_bs_test(To0, Is, D),
To2 = shortcut_label(To1, D),
+ To3 = shortcut_rel_op(To2, Op, Ops0, D),
+
%% Try to shortcut a repeated test:
%%
%% test Op {f,Fail1} Operands test Op {f,Fail2} Operands
%% . . . ==> ...
%% Fail1: test Op {f,Fail2} Operands Fail1: test Op {f,Fail2} Operands
%%
- To = case beam_utils:code_at(To2, D) of
- [{test,Op,{f,To3},Ops}|_] ->
+ To = case beam_utils:code_at(To3, D) of
+ [{test,Op,{f,To4},Ops}|_] ->
case equal_ops(Ops0, Ops) of
- true -> To3;
- false -> To2
+ true -> To4;
+ false -> To3
end;
_Code ->
- To2
+ To3
end,
I = case Op of
is_eq_exact -> combine_eqs(To, Ops0, D, Acc);
@@ -367,8 +316,8 @@ equal_ops([Op|T0], [Op|T1]) ->
equal_ops([], []) -> true;
equal_ops(_, _) -> false.
-shortcut_select_list([{_,Val}=Lit,{f,To0}|T], Reg, D, Acc) ->
- To = shortcut_select_label(To0, Reg, Val, D),
+shortcut_select_list([Lit,{f,To0}|T], Reg, D, Acc) ->
+ To = shortcut_select_label(To0, Reg, Lit, D),
shortcut_select_list(T, Reg, D, [{f,To},Lit|Acc]);
shortcut_select_list([], _, _, Acc) -> reverse(Acc).
@@ -378,58 +327,39 @@ shortcut_label(To0, D) ->
_ -> To0
end.
-shortcut_select_label(To0, Reg, Val, D) ->
- case beam_utils:code_at(To0, D) of
- [{jump,{f,To}}|_] ->
- shortcut_select_label(To, Reg, Val, D);
- [{test,is_atom,_,[Reg]},{select,select_val,Reg,{f,Fail},Map}|_] ->
- To = find_select_val(Map, Val, Fail),
- shortcut_select_label(To, Reg, Val, D);
- [{test,is_eq_exact,{f,_},[Reg,{atom,Val}]},{label,To}|_] when is_atom(Val) ->
- shortcut_select_label(To, Reg, Val, D);
- [{test,is_eq_exact,{f,_},[Reg,{atom,Val}]},{jump,{f,To}}|_] when is_atom(Val) ->
- shortcut_select_label(To, Reg, Val, D);
- [{test,is_eq_exact,{f,To},[Reg,{atom,AnotherVal}]}|_]
- when is_atom(Val), Val =/= AnotherVal ->
- shortcut_select_label(To, Reg, Val, D);
- [{test,is_ne_exact,{f,To},[Reg,{atom,Val}]}|_] when is_atom(Val) ->
- shortcut_select_label(To, Reg, Val, D);
- [{test,is_ne_exact,{f,_},[Reg,{atom,_}]},{label,To}|_] when is_atom(Val) ->
- shortcut_select_label(To, Reg, Val, D);
- [{test,is_tuple,{f,To},[Reg]}|_] when is_atom(Val) ->
- shortcut_select_label(To, Reg, Val, D);
- _ ->
- To0
- end.
+shortcut_select_label(To, Reg, Lit, D) ->
+ shortcut_rel_op(To, is_ne_exact, [Reg,Lit], D).
-shortcut_fail_label(To0, Reg, Val, D) ->
- case beam_utils:code_at(To0, D) of
- [{jump,{f,To}}|_] ->
- shortcut_fail_label(To, Reg, Val, D);
- [{test,is_eq_exact,{f,To},[Reg,{atom,Val}]}|_] when is_atom(Val) ->
- shortcut_fail_label(To, Reg, Val, D);
- _ ->
- To0
- end.
-
-shortcut_boolean_label(To0, Reg, Bool0, D) when is_boolean(Bool0) ->
+shortcut_boolean_label(To0, Reg, {atom,Bool0}=Lit, D) when is_boolean(Bool0) ->
case beam_utils:code_at(To0, D) of
[{line,_},{bif,'not',_,[Reg],Reg},{jump,{f,To}}|_] ->
- Bool = not Bool0,
+ Bool = {atom,not Bool0},
{shortcut_select_label(To, Reg, Bool, D),Bool};
_ ->
- {To0,Bool0}
+ {To0,Lit}
end;
shortcut_boolean_label(To, _, Bool, _) -> {To,Bool}.
-find_select_val([{_,Val},{f,To}|_], Val, _) -> To;
-find_select_val([{_,_}, {f,_}|T], Val, Fail) ->
- find_select_val(T, Val, Fail);
-find_select_val([], _, Fail) -> Fail.
+%% Replace a comparison operator with a test instruction and a jump.
+%% For example, if we have this code:
+%%
+%% bif '=:=' Fail Src1 Src2 {x,0}
+%% jump L1
+%% .
+%% .
+%% .
+%% L1: select_val {x,0} FailLabel [... true => L2..., ...false => L3...]
+%%
+%% the first two instructions can be replaced with
+%%
+%% test is_eq_exact L3 Src1 Src2
+%% jump L2
+%%
+%% provided that {x,0} is killed at both L2 and L3.
replace_comp_op(To, Reg, Op, Ops, D) ->
- False = comp_op_find_shortcut(To, Reg, false, D),
- True = comp_op_find_shortcut(To, Reg, true, D),
+ False = comp_op_find_shortcut(To, Reg, {atom,false}, D),
+ True = comp_op_find_shortcut(To, Reg, {atom,true}, D),
[bif_to_test(Op, Ops, False),{jump,{f,True}}].
comp_op_find_shortcut(To0, Reg, Val, D) ->
@@ -461,9 +391,9 @@ not_possible() -> throw(not_possible).
%%
%% is_eq_exact F1 Reg Lit1 select_val Reg F2 [ Lit1 L1
%% L1: . Lit2 L2 ]
-%% .
-%% . ==>
-%% .
+%% .
+%% . ==>
+%% .
%% F1: is_eq_exact F2 Reg Lit2 F1: is_eq_exact F2 Reg Lit2
%% L2: .... L2:
%%
@@ -488,31 +418,26 @@ remove_from_list(Lit, [Val,{f,_}=Fail|T]) ->
[Val,Fail|remove_from_list(Lit, T)];
remove_from_list(_, []) -> [].
-%% shortcut_bs_test(TargetLabel, [Instruction], D) -> TargetLabel'
-%% Try to shortcut the failure label for a bit syntax matching.
-%% We know that the binary contains at least Bits bits after
-%% the latest save point.
+%% shortcut_bs_test(TargetLabel, ReversedInstructions, D) -> TargetLabel'
+%% Try to shortcut the failure label for bit syntax matching.
shortcut_bs_test(To, Is, D) ->
shortcut_bs_test_1(beam_utils:code_at(To, D), Is, To, D).
-shortcut_bs_test_1([{bs_restore2,Reg,SavePoint}|Is], PrevIs, To, D) ->
- shortcut_bs_test_2(Is, {Reg,SavePoint}, PrevIs, To, D);
-shortcut_bs_test_1([_|_], _, To, _) -> To.
-
-shortcut_bs_test_2([{label,_}|Is], Save, PrevIs, To, D) ->
- shortcut_bs_test_2(Is, Save, PrevIs, To, D);
-shortcut_bs_test_2([{test,bs_test_tail2,{f,To},[_,TailBits]}|_],
- {Reg,_Point} = RP, PrevIs, To0, D) ->
- case count_bits_matched(PrevIs, RP, 0) of
+shortcut_bs_test_1([{bs_restore2,Reg,SavePoint},
+ {label,_},
+ {test,bs_test_tail2,{f,To},[_,TailBits]}|_],
+ PrevIs, To0, D) ->
+ case count_bits_matched(PrevIs, {Reg,SavePoint}, 0) of
Bits when Bits > TailBits ->
%% This instruction will fail. We know because a restore has been
- %% done from the previous point SavePoint in the binary, and we also know
- %% that the binary contains at least Bits bits from SavePoint.
+ %% done from the previous point SavePoint in the binary, and we
+ %% also know that the binary contains at least Bits bits from
+ %% SavePoint.
%%
%% Since we will skip a bs_restore2 if we shortcut to label To,
- %% we must now make sure that code at To does not depend on the position
- %% in the context in any way.
+ %% we must now make sure that code at To does not depend on
+ %% the position in the context in any way.
case shortcut_bs_pos_used(To, Reg, D) of
false -> To;
true -> To0
@@ -520,8 +445,19 @@ shortcut_bs_test_2([{test,bs_test_tail2,{f,To},[_,TailBits]}|_],
_Bits ->
To0
end;
-shortcut_bs_test_2([_|_], _, _, To, _) -> To.
+shortcut_bs_test_1([_|_], _, To, _) -> To.
+%% count_bits_matched(ReversedInstructions, SavePoint, Bits) -> Bits'
+%% Given a reversed instruction stream, determine the minimum number
+%% of bits that will be matched by bit syntax instructions up to the
+%% given save point.
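+%%
+%%  As an illustration only (segment sizes invented here, not taken
+%%  from real compiler output): a 16-bit integer segment followed by
+%%  a utf8 segment guarantees at least 16 + 8 = 24 bits, since a
+%%  UTF-8 encoded code point occupies at least 8 bits.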
+
+count_bits_matched([{test,bs_get_utf8,{f,_},_,_,_}|Is], SavePoint, Bits) ->
+ count_bits_matched(Is, SavePoint, Bits+8);
+count_bits_matched([{test,bs_get_utf16,{f,_},_,_,_}|Is], SavePoint, Bits) ->
+ count_bits_matched(Is, SavePoint, Bits+16);
+count_bits_matched([{test,bs_get_utf32,{f,_},_,_,_}|Is], SavePoint, Bits) ->
+ count_bits_matched(Is, SavePoint, Bits+32);
count_bits_matched([{test,_,_,_,[_,Sz,U,{field_flags,_}],_}|Is], SavePoint, Bits) ->
case Sz of
{integer,N} -> count_bits_matched(Is, SavePoint, Bits+N*U);
@@ -545,20 +481,332 @@ shortcut_bs_pos_used_1(Is, Reg, D) ->
not beam_utils:is_killed(Reg, Is, D).
%% shortcut_bs_start_match(TargetLabel, Reg) -> TargetLabel
-%% A failing bs_start_match2 instruction means that the source
-%% cannot be a binary, so there is no need to jump bs_context_to_binary/1
-%% or another bs_start_match2 instruction.
+%% A failing bs_start_match2 instruction means that the source (Reg)
+%% cannot be a binary. That means that it is safe to skip
+%% bs_context_to_binary instructions operating on Reg, and
+%% bs_start_match2 instructions operating on Reg.
shortcut_bs_start_match(To, Reg, D) ->
- shortcut_bs_start_match_1(beam_utils:code_at(To, D), Reg, To).
+ shortcut_bs_start_match_1(beam_utils:code_at(To, D), Reg, To, D).
+
+shortcut_bs_start_match_1([{bs_context_to_binary,Reg}|Is], Reg, To, D) ->
+ shortcut_bs_start_match_1(Is, Reg, To, D);
+shortcut_bs_start_match_1([{jump,{f,To}}|_], Reg, _, D) ->
+ Code = beam_utils:code_at(To, D),
+ shortcut_bs_start_match_1(Code, Reg, To, D);
+shortcut_bs_start_match_1([{test,bs_start_match2,{f,To},_,[Reg|_],_}|_],
+ Reg, _, D) ->
+ Code = beam_utils:code_at(To, D),
+ shortcut_bs_start_match_1(Code, Reg, To, D);
+shortcut_bs_start_match_1(_, _, To, _) ->
+ To.
-shortcut_bs_start_match_1([{bs_context_to_binary,Reg}|Is], Reg, To) ->
- shortcut_bs_start_match_2(Is, Reg, To);
-shortcut_bs_start_match_1(_, _, To) -> To.
+%% shortcut_rel_op(FailLabel, Operator, [Operand], D) -> FailLabel'
+%% Try to shortcut the given test instruction. Example:
+%%
+%% is_ge L1 {x,0} 48
+%% .
+%% .
+%% .
+%% L1: is_ge L2 {x,0} 65
+%%
+%% The first test instruction can be rewritten to "is_ge L2 {x,0} 48"
+%% since the instruction at L1 will also fail.
+%%
+%% If there are instructions between L1 and the other test instruction
+%% it may still be possible to do the shortcut. For example:
+%%
+%% L1: is_eq_exact L3 {x,0} 92
+%% is_ge L2 {x,0} 65
+%%
+%% Since the first test instruction failed, we know that {x,0} must
+%% be less than 48; therefore, we know that {x,0} cannot be equal to
+%% 92 and the jump to L3 cannot happen.
+
+shortcut_rel_op(To, Op, Ops, D) ->
+ case normalize_op({test,Op,{f,To},Ops}) of
+ {{NormOp,A,B},_} ->
+ Normalized = {negate_op(NormOp),A,B},
+ shortcut_rel_op_fp(To, Normalized, D);
+ {_,_} ->
+ To;
+ error ->
+ To
+ end.
-shortcut_bs_start_match_2([{jump,{f,To}}|_], _, _) ->
- To;
-shortcut_bs_start_match_2([{test,bs_start_match2,{f,To},_,[Reg|_],_}|_], Reg, _) ->
- To;
-shortcut_bs_start_match_2(_Is, _Reg, To) ->
- To.
+shortcut_rel_op_fp(To0, Normalized, D) ->
+ Code = beam_utils:code_at(To0, D),
+ case shortcut_any_label(Code, Normalized) of
+ error ->
+ To0;
+ To ->
+ shortcut_rel_op_fp(To, Normalized, D)
+ end.
+
+%% shortcut_any_label([Instruction], PrevCondition) -> FailLabel | error
+%% Using PrevCondition (a previous condition known to be true),
+%% try to shortcut to another failure label.
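+%%
+%%  A hypothetical illustration (labels and values invented here):
+%%  with PrevCondition = {'=:=',{x,0},{literal,42}}, the code
+%%
+%%    [{test,is_integer,{f,20},[{x,0}]},{jump,{f,30}}]
+%%
+%%  shortcuts to label 30, because the is_integer test is known to
+%%  succeed and the following jump is unconditional.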
+
+shortcut_any_label([{jump,{f,Lbl}}|_], _Prev) ->
+ Lbl;
+shortcut_any_label([{label,Lbl}|_], _Prev) ->
+ Lbl;
+shortcut_any_label([{select,select_val,R,{f,Fail},L}|_], Prev) ->
+ shortcut_selectval(L, R, Fail, Prev);
+shortcut_any_label([I|Is], Prev) ->
+ case normalize_op(I) of
+ error ->
+ error;
+ {Normalized,Fail} ->
+ %% We have a relational operator.
+ case will_succeed(Prev, Normalized) of
+ no ->
+ %% This test instruction will always branch
+ %% to Fail.
+ Fail;
+ yes ->
+ %% This test instruction will never branch,
+ %% so we will look at the next instruction.
+ shortcut_any_label(Is, Prev);
+ maybe ->
+ %% May or may not branch. From now on, we can only
+		    %% shortcut to this specific failure label
+ %% Fail.
+ shortcut_specific_label(Is, Fail, Prev)
+ end
+ end.
+
+%% shortcut_specific_label([Instruction], FailLabel, PrevCondition) ->
+%% FailLabel | error
+%% We have previously encountered a test instruction that may or
+%% may not branch to FailLabel. Therefore we are only allowed
+%%  to shortcut to the same fail label (FailLabel).
+
+shortcut_specific_label([{label,_}|Is], Fail, Prev) ->
+ shortcut_specific_label(Is, Fail, Prev);
+shortcut_specific_label([{select,select_val,R,{f,F},L}|_], Fail, Prev) ->
+ case shortcut_selectval(L, R, F, Prev) of
+ Fail -> Fail;
+ _ -> error
+ end;
+shortcut_specific_label([I|Is], Fail, Prev) ->
+ case normalize_op(I) of
+ error ->
+ error;
+ {Normalized,Fail} ->
+ case will_succeed(Prev, Normalized) of
+ no ->
+ %% Will branch to FailLabel.
+ Fail;
+ yes ->
+ %% Will definitely never branch.
+ shortcut_specific_label(Is, Fail, Prev);
+ maybe ->
+ %% May branch, but still OK since it will branch
+ %% to FailLabel.
+ shortcut_specific_label(Is, Fail, Prev)
+ end;
+ {Normalized,_} ->
+ %% This test instruction will branch to a different
+ %% fail label, if it branches at all.
+ case will_succeed(Prev, Normalized) of
+ yes ->
+ %% Still OK, since the branch will never be
+ %% taken.
+ shortcut_specific_label(Is, Fail, Prev);
+ no ->
+ %% Give up. The branch will definitely be taken
+ %% to a different fail label.
+ error;
+ maybe ->
+ %% Give up. If the branch is taken, it will be
+ %% to a different fail label.
+ error
+ end
+ end.
+
+
+%% shortcut_selectval(List, Reg, Fail, PrevCond) -> FailLabel | error
+%% Try to shortcut a selectval instruction. A selectval instruction
+%% is equivalent to the following instruction sequence:
+%%
+%% is_ne_exact L1 Reg Value1
+%% .
+%% .
+%% .
+%% is_ne_exact LN Reg ValueN
+%% jump DefaultFailLabel
+%%
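+%%  A hypothetical illustration (labels and values invented here):
+%%  if PrevCond is {'=:=',{x,0},{literal,b}}, then
+%%
+%%    shortcut_selectval([{atom,a},{f,1},{atom,b},{f,2}], {x,0}, 99, PrevCond)
+%%
+%%  returns 2, since {x,0} cannot be 'a' and must be equal to 'b'.
+%%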
+shortcut_selectval([Val,{f,Lbl}|T], R, Fail, Prev) ->
+ case will_succeed(Prev, {'=/=',R,get_literal(Val)}) of
+ yes -> shortcut_selectval(T, R, Fail, Prev);
+ no -> Lbl;
+ maybe -> error
+ end;
+shortcut_selectval([], _, Fail, _) -> Fail.
+
+%% will_succeed(PrevCondition, Condition) -> yes | no | maybe
+%%  PrevCondition is a condition known to be true. This function
+%%  tells whether Condition is then guaranteed to succeed (yes),
+%%  guaranteed to fail (no), or cannot be decided (maybe).
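+%%
+%%  A few illustrative evaluations (hypothetical, assuming literal
+%%  operands wrapped as {literal,Term} by normalize_op_1/3 below):
+%%
+%%    will_succeed({'=:=',{x,0},{literal,42}}, {'<',{x,0},{literal,100}})
+%%      returns yes   (42 < 100 always holds)
+%%    will_succeed({'=:=',{x,0},{literal,42}}, {'>',{x,0},{literal,100}})
+%%      returns no    (42 > 100 never holds)
+%%    will_succeed({'<',{x,0},{literal,48}}, {'=:=',{x,0},{literal,92}})
+%%      returns no    (a value below 48 cannot be exactly 92)
+%%    will_succeed({'<',{x,0},{literal,48}}, {'>',{x,0},{literal,10}})
+%%      returns maybe (cannot be decided from the known condition)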
+
+will_succeed({Op1,Reg,A}, {Op2,Reg,B}) ->
+ will_succeed_1(Op1, A, Op2, B);
+will_succeed({'=:=',Reg,{literal,A}}, {TypeTest,Reg}) ->
+ case erlang:TypeTest(A) of
+ false -> no;
+ true -> yes
+ end;
+will_succeed({_,_,_}, maybe) ->
+ maybe;
+will_succeed({_,_,_}, Test) when is_tuple(Test) ->
+ maybe.
+
+will_succeed_1('=:=', A, '<', B) ->
+ if
+ B =< A -> no;
+ true -> yes
+ end;
+will_succeed_1('=:=', A, '=<', B) ->
+ if
+ B < A -> no;
+ true -> yes
+ end;
+will_succeed_1('=:=', A, '=:=', B) ->
+ if
+ A =:= B -> yes;
+ true -> no
+ end;
+will_succeed_1('=:=', A, '=/=', B) ->
+ if
+ A =:= B -> no;
+ true -> yes
+ end;
+will_succeed_1('=:=', A, '>=', B) ->
+ if
+ B > A -> no;
+ true -> yes
+ end;
+will_succeed_1('=:=', A, '>', B) ->
+ if
+ B >= A -> no;
+ true -> yes
+ end;
+
+will_succeed_1('=/=', A, '=/=', B) when A =:= B -> yes;
+will_succeed_1('=/=', A, '=:=', B) when A =:= B -> no;
+
+will_succeed_1('<', A, '=:=', B) when B >= A -> no;
+will_succeed_1('<', A, '=/=', B) when B >= A -> yes;
+will_succeed_1('<', A, '<', B) when B >= A -> yes;
+will_succeed_1('<', A, '=<', B) when B > A -> yes;
+will_succeed_1('<', A, '>=', B) when B > A -> no;
+will_succeed_1('<', A, '>', B) when B >= A -> no;
+
+will_succeed_1('=<', A, '=:=', B) when B > A -> no;
+will_succeed_1('=<', A, '=/=', B) when B > A -> yes;
+will_succeed_1('=<', A, '<', B) when B > A -> yes;
+will_succeed_1('=<', A, '=<', B) when B >= A -> yes;
+will_succeed_1('=<', A, '>=', B) when B > A -> no;
+will_succeed_1('=<', A, '>', B) when B >= A -> no;
+
+will_succeed_1('>=', A, '=:=', B) when B < A -> no;
+will_succeed_1('>=', A, '=/=', B) when B < A -> yes;
+will_succeed_1('>=', A, '<', B) when B =< A -> no;
+will_succeed_1('>=', A, '=<', B) when B < A -> no;
+will_succeed_1('>=', A, '>=', B) when B =< A -> yes;
+will_succeed_1('>=', A, '>', B) when B < A -> yes;
+
+will_succeed_1('>', A, '=:=', B) when B =< A -> no;
+will_succeed_1('>', A, '=/=', B) when B =< A -> yes;
+will_succeed_1('>', A, '<', B) when B =< A -> no;
+will_succeed_1('>', A, '=<', B) when B < A -> no;
+will_succeed_1('>', A, '>=', B) when B =< A -> yes;
+will_succeed_1('>', A, '>', B) when B < A -> yes;
+
+will_succeed_1(_, _, _, _) -> maybe.
+
+%% normalize_op(Instruction) -> {Normalized,FailLabel} | error
+%% Normalized = {Operator,Register,Literal} |
+%% {TypeTest,Register} |
+%% maybe
+%%     Operator = '<' | '=<' | '=:=' | '=/=' | '>=' | '>'
+%% TypeTest = is_atom | is_integer ...
+%% Literal = {literal,Term}
+%%
+%% Normalize a relational operator to facilitate further
+%% comparisons between operators. Always make the register
+%% operand the first operand. Thus the following instruction:
+%%
+%%    {test,is_ge,{f,99},[{integer,13},{x,0}]}
+%%
+%% will be normalized to:
+%%
+%% {'=<',{x,0},{literal,13}}
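+%%
+%% A couple of further hypothetical examples: {test,is_nil,{f,5},[{x,1}]}
+%% normalizes to {'=:=',{x,1},{literal,[]}}, and
+%% {test,is_lt,{f,7},[{integer,0},{x,2}]} becomes {'>',{x,2},{literal,0}}
+%% after turn_op/1 has swapped the operands.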
+%%
+%% NOTE: Bit syntax test instructions are scary. They may change the
+%% state of match contexts and update registers, so we don't dare
+%% mess with them.
+
+normalize_op({test,is_ge,{f,Fail},Ops}) ->
+ normalize_op_1('>=', Ops, Fail);
+normalize_op({test,is_lt,{f,Fail},Ops}) ->
+ normalize_op_1('<', Ops, Fail);
+normalize_op({test,is_eq_exact,{f,Fail},Ops}) ->
+ normalize_op_1('=:=', Ops, Fail);
+normalize_op({test,is_ne_exact,{f,Fail},Ops}) ->
+ normalize_op_1('=/=', Ops, Fail);
+normalize_op({test,is_nil,{f,Fail},[R]}) ->
+ normalize_op_1('=:=', [R,nil], Fail);
+normalize_op({test,Op,{f,Fail},[R]}) ->
+ case erl_internal:new_type_test(Op, 1) of
+ true -> {{Op,R},Fail};
+ false -> {maybe,Fail}
+ end;
+normalize_op({test,_,{f,Fail},_}=I) ->
+ case beam_utils:is_pure_test(I) of
+ true -> {maybe,Fail};
+ false -> error
+ end;
+normalize_op(_) ->
+ error.
+
+normalize_op_1(Op, [Op1,Op2], Fail) ->
+ case {get_literal(Op1),get_literal(Op2)} of
+ {error,error} ->
+ %% Both operands are registers.
+ {maybe,Fail};
+ {error,Lit} ->
+ {{Op,Op1,Lit},Fail};
+ {Lit,error} ->
+ {{turn_op(Op),Op2,Lit},Fail};
+ {_,_} ->
+ %% Both operands are literals. Can probably only
+	    %% happen if the Core Erlang optimization passes were
+ %% turned off, so don't bother trying to do something
+ %% smart here.
+ {maybe,Fail}
+ end.
+
+turn_op('<') -> '>';
+turn_op('>=') -> '=<';
+turn_op('=:='=Op) -> Op;
+turn_op('=/='=Op) -> Op.
+
+negate_op('>=') -> '<';
+negate_op('<') -> '>=';
+negate_op('=<') -> '>';
+negate_op('>') -> '=<';
+negate_op('=:=') -> '=/=';
+negate_op('=/=') -> '=:='.
+
+get_literal({atom,Val}) ->
+ {literal,Val};
+get_literal({integer,Val}) ->
+ {literal,Val};
+get_literal({float,Val}) ->
+ {literal,Val};
+get_literal(nil) ->
+ {literal,[]};
+get_literal({literal,_}=Lit) ->
+ Lit;
+get_literal({_,_}) -> error.
diff --git a/lib/compiler/src/beam_flatten.erl b/lib/compiler/src/beam_flatten.erl
index 05d067dc48..54e06df995 100644
--- a/lib/compiler/src/beam_flatten.erl
+++ b/lib/compiler/src/beam_flatten.erl
@@ -63,9 +63,7 @@ norm({set,[],[S,D],{set_tuple_element,I}}) -> {set_tuple_element,S,D,I};
norm({set,[D1,D2],[S],get_list}) -> {get_list,S,D1,D2};
norm({set,[D],[S|Puts],{alloc,R,{put_map,Op,F}}}) ->
{put_map,F,Op,S,D,R,{list,Puts}};
-norm({set,Ds,[S|Ss],{get_map_elements,F}}) ->
- Gets = beam_utils:joineven(Ss,Ds),
- {get_map_elements,F,S,{list,Gets}};
+%% get_map_elements is always handled in beam_split (moved out of block)
norm({set,[],[],remove_message}) -> remove_message;
norm({set,[],[],fclearerror}) -> fclearerror;
norm({set,[],[],fcheckerror}) -> {fcheckerror,{f,0}}.
diff --git a/lib/compiler/src/beam_jump.erl b/lib/compiler/src/beam_jump.erl
index b952139f2c..ba71d4efae 100644
--- a/lib/compiler/src/beam_jump.erl
+++ b/lib/compiler/src/beam_jump.erl
@@ -166,6 +166,12 @@ share_1([{label,L}=Lbl|Is], Dict0, Seq, Acc) ->
end;
share_1([{func_info,_,_,_}=I|Is], _, [], Acc) ->
reverse(Is, [I|Acc]);
+share_1([{'try',_,_}=I|Is], Dict0, Seq, Acc) ->
+ Dict = clean_non_sharable(Dict0),
+ share_1(Is, Dict, [I|Seq], Acc);
+share_1([{try_case,_}=I|Is], Dict0, Seq, Acc) ->
+ Dict = clean_non_sharable(Dict0),
+ share_1(Is, Dict, [I|Seq], Acc);
share_1([I|Is], Dict, Seq, Acc) ->
case is_unreachable_after(I) of
false ->
@@ -174,6 +180,24 @@ share_1([I|Is], Dict, Seq, Acc) ->
share_1(Is, Dict, [I], Acc)
end.
+clean_non_sharable(Dict) ->
+ %% We are passing in or out of a 'try' block. Remove
+ %% sequences that should not shared over the boundaries
+    %% sequences that should not be shared over the boundaries
+ %% the only possible match between a sequence outside and
+ %% a sequence inside the 'try' block is a sequence that ends
+ %% with an instruction that causes an exception. Any sequence
+ %% that causes an exception must contain a line/1 instruction.
+ dict:filter(fun(K, _V) -> sharable_with_try(K) end, Dict).
+
+sharable_with_try([{line,_}|_]) ->
+ %% This sequence may cause an exception and may potentially
+ %% match a sequence on the other side of the 'try' block
+ %% boundary.
+ false;
+sharable_with_try([_|Is]) ->
+ sharable_with_try(Is);
+sharable_with_try([]) -> true.
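+
+%% Hypothetical examples (instruction sequences invented here) of what
+%% sharable_with_try/1 accepts and rejects:
+%%
+%%   sharable_with_try([{move,{atom,ok},{x,0}},return])        -> true
+%%   sharable_with_try([{line,[{location,"t.erl",7}]},return]) -> false
+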
%% Eliminate all fallthroughs. Return the result reversed.
@@ -295,12 +319,6 @@ opt([{test,_,{f,_}=Lbl,_,_,_}=I|Is], Acc, St) ->
opt(Is, [I|Acc], label_used(Lbl, St));
opt([{select,_,_R,Fail,Vls}=I|Is], Acc, St) ->
skip_unreachable(Is, [I|Acc], label_used([Fail|Vls], St));
-opt([{label,L}=I|Is], Acc, #st{entry=L}=St) ->
- %% NEVER move the entry label.
- opt(Is, [I|Acc], St);
-opt([{label,L1},{jump,{f,L2}}=I|Is], [Prev|Acc], St0) ->
- St = St0#st{mlbl=dict:append(L2, L1, St0#st.mlbl)},
- opt([Prev,I|Is], Acc, label_used({f,L2}, St));
opt([{label,Lbl}=I|Is], Acc, #st{mlbl=Mlbl}=St0) ->
case dict:find(Lbl, Mlbl) of
{ok,Lbls} ->
@@ -310,9 +328,20 @@ opt([{label,Lbl}=I|Is], Acc, #st{mlbl=Mlbl}=St0) ->
insert_labels([Lbl|Lbls], Is, Acc, St);
error -> opt(Is, [I|Acc], St0)
end;
-opt([{jump,{f,Lbl}},{label,Lbl}=I|Is], Acc, St) ->
- opt([I|Is], Acc, St);
-opt([{jump,Lbl}=I|Is], Acc, St) ->
+opt([{jump,{f,_}=X}|[{label,_},{jump,X}|_]=Is], Acc, St) ->
+ opt(Is, Acc, St);
+opt([{jump,{f,Lbl}}|[{label,Lbl}|_]=Is], Acc, St) ->
+ opt(Is, Acc, St);
+opt([{jump,{f,L}=Lbl}=I|Is], Acc0, #st{mlbl=Mlbl0}=St0) ->
+ %% All labels before this jump instruction should now be
+ %% moved to the location of the jump's target.
+ {Lbls,Acc} = collect_labels(Acc0, St0),
+ St = case Lbls of
+ [] -> St0;
+ [_|_] ->
+ Mlbl = dict:append_list(L, Lbls, Mlbl0),
+ St0#st{mlbl=Mlbl}
+ end,
skip_unreachable(Is, [I|Acc], label_used(Lbl, St));
%% Optimization: quickly handle some common instructions that don't
%% have any failure labels and where is_unreachable_after(I) =:= false.
@@ -349,6 +378,17 @@ insert_fc_labels([L|Ls], Mlbl, Acc0) ->
end;
insert_fc_labels([], _, Acc) -> Acc.
+collect_labels(Is, #st{entry=Entry}) ->
+ collect_labels_1(Is, Entry, []).
+
+collect_labels_1([{label,Entry}|_]=Is, Entry, Acc) ->
+ %% Never move the entry label.
+ {Acc,Is};
+collect_labels_1([{label,L}|Is], Entry, Acc) ->
+ collect_labels_1(Is, Entry, [L|Acc]);
+collect_labels_1(Is, _Entry, Acc) ->
+ {Acc,Is}.
+
%% label_defined(Is, Label) -> true | false.
%% Test whether the label Label is defined at the start of the instruction
%%  sequence, possibly preceded by other label definitions.
@@ -435,14 +475,14 @@ is_label_used_in(Lbl, Is) ->
is_label_used_in_1(Is, Lbl, gb_sets:empty()).
is_label_used_in_1([{block,Block}|Is], Lbl, Empty) ->
- lists:any(fun(I) -> is_label_used_in_2(I, Lbl) end, Block)
+ lists:any(fun(I) -> is_label_used_in_block(I, Lbl) end, Block)
orelse is_label_used_in_1(Is, Lbl, Empty);
is_label_used_in_1([I|Is], Lbl, Empty) ->
Used = ulbl(I, Empty),
gb_sets:is_member(Lbl, Used) orelse is_label_used_in_1(Is, Lbl, Empty);
is_label_used_in_1([], _, _) -> false.
-is_label_used_in_2({set,_,_,Info}, Lbl) ->
+is_label_used_in_block({set,_,_,Info}, Lbl) ->
case Info of
{bif,_,{f,F}} -> F =:= Lbl;
{alloc,_,{gc_bif,_,{f,F}}} -> F =:= Lbl;
@@ -452,7 +492,6 @@ is_label_used_in_2({set,_,_,Info}, Lbl) ->
{put_tuple,_} -> false;
{get_tuple_element,_} -> false;
{set_tuple_element,_} -> false;
- {get_map_elements,{f,F}} -> F =:= Lbl;
{line,_} -> false;
_ when is_atom(Info) -> false
end.
diff --git a/lib/compiler/src/beam_split.erl b/lib/compiler/src/beam_split.erl
index f5dba314ae..0c62b0bf3d 100644
--- a/lib/compiler/src/beam_split.erl
+++ b/lib/compiler/src/beam_split.erl
@@ -53,9 +53,8 @@ split_block([{set,[D],[S|Puts],{alloc,R,{put_map,Op,{f,Lbl}=Fail}}}|Is],
Bl, Acc) when Lbl =/= 0 ->
split_block(Is, [], [{put_map,Fail,Op,S,D,R,{list,Puts}}|
make_block(Bl, Acc)]);
-split_block([{set,Ds,[S|Ss],{get_map_elements,{f,Lbl}=Fail}}|Is], Bl, Acc)
- when Lbl =/= 0 ->
- Gets = beam_utils:joineven(Ss,Ds),
+split_block([{set,Ds,[S|Ss],{get_map_elements,Fail}}|Is], Bl, Acc) ->
+ Gets = beam_utils:join_even(Ss,Ds),
split_block(Is, [], [{get_map_elements,Fail,S,{list,Gets}}|make_block(Bl, Acc)]);
split_block([{set,[R],[],{'catch',L}}|Is], Bl, Acc) ->
split_block(Is, [], [{'catch',R,L}|make_block(Bl, Acc)]);
diff --git a/lib/compiler/src/beam_type.erl b/lib/compiler/src/beam_type.erl
index cdddad4153..26c933481a 100644
--- a/lib/compiler/src/beam_type.erl
+++ b/lib/compiler/src/beam_type.erl
@@ -244,7 +244,7 @@ clearerror([], OrigIs) -> [{set,[],[],fclearerror}|OrigIs].
%% Combine two blocks and eliminate any move instructions that assign
%% to registers that are killed later in the block.
%%
-merge_blocks(B1, [{'%live',_}|B2]) ->
+merge_blocks(B1, [{'%live',_,_}|B2]) ->
merge_blocks_1(B1++[{set,[],[],stop_here}|B2]).
merge_blocks_1([{set,[],_,stop_here}|Is]) -> Is;
@@ -329,27 +329,27 @@ build_alloc(Words, Floats) -> {alloc,[{words,Words},{floats,Floats}]}.
%% flt_liveness([Instruction]) -> [Instruction]
%% (Re)calculate the number of live registers for each heap allocation
-%% function. We base liveness of the number of live registers at
-%% entry to the instruction sequence.
+%%  function. We base liveness on the register map at the
+%%  beginning of the instruction sequence.
%%
%% A 'not_possible' term will be thrown if the set of live registers
%% is not continuous at an allocation function (e.g. if {x,0} and {x,2}
%% are live, but not {x,1}).
-flt_liveness([{'%live',Live}=LiveInstr|Is]) ->
- flt_liveness_1(Is, init_regs(Live), [LiveInstr]).
+flt_liveness([{'%live',_Live,Regs}=LiveInstr|Is]) ->
+ flt_liveness_1(Is, Regs, [LiveInstr]).
-flt_liveness_1([{set,Ds,Ss,{alloc,_,Alloc}}|Is], Regs0, Acc) ->
- Live = live_regs(Regs0),
+flt_liveness_1([{set,Ds,Ss,{alloc,Live0,Alloc}}|Is], Regs0, Acc) ->
+ Live = min(Live0, live_regs(Regs0)),
I = {set,Ds,Ss,{alloc,Live,Alloc}},
- Regs = foldl(fun(R, A) -> set_live(R, A) end, Regs0, Ds),
+ Regs1 = init_regs(Live),
+ Regs = x_live(Ds, Regs1),
flt_liveness_1(Is, Regs, [I|Acc]);
flt_liveness_1([{set,Ds,_,_}=I|Is], Regs0, Acc) ->
- Regs = foldl(fun(R, A) -> set_live(R, A) end, Regs0, Ds),
+ Regs = x_live(Ds, Regs0),
flt_liveness_1(Is, Regs, [I|Acc]);
-flt_liveness_1([{'%live',_}=I|Is], Regs, Acc) ->
- flt_liveness_1(Is, Regs, [I|Acc]);
-flt_liveness_1([], _Regs, Acc) -> reverse(Acc).
+flt_liveness_1([{'%live',_,_}], _Regs, Acc) ->
+ reverse(Acc).
init_regs(Live) ->
(1 bsl Live) - 1.
@@ -364,14 +364,15 @@ live_regs_1(R, N) ->
1 -> live_regs_1(R bsr 1, N+1)
end.
-set_live({x,X}, Regs) -> Regs bor (1 bsl X);
-set_live(_, Regs) -> Regs.
+x_live([{x,N}|Rs], Regs) -> x_live(Rs, Regs bor (1 bsl N));
+x_live([_|Rs], Regs) -> x_live(Rs, Regs);
+x_live([], Regs) -> Regs.
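+
+%% Illustrative values only (chosen here, not produced by the compiler):
+%% init_regs(3) yields 2#111, meaning that {x,0}, {x,1} and {x,2} are
+%% live, and x_live([{x,4}], 2#111) yields 2#10111, which additionally
+%% marks {x,4} as live.
+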
%% update(Instruction, TypeDb) -> NewTypeDb
%% Update the type database to account for executing an instruction.
%%
%% First the cases for instructions inside basic blocks.
-update({'%live',_}, Ts) -> Ts;
+update({'%live',_,_}, Ts) -> Ts;
update({set,[D],[S],move}, Ts) ->
tdb_copy(S, D, Ts);
update({set,[D],[{integer,I},Reg],{bif,element,_}}, Ts0) ->
@@ -469,6 +470,7 @@ is_math_bif(erf, 1) -> true;
is_math_bif(erfc, 1) -> true;
is_math_bif(exp, 1) -> true;
is_math_bif(log, 1) -> true;
+is_math_bif(log2, 1) -> true;
is_math_bif(log10, 1) -> true;
is_math_bif(sqrt, 1) -> true;
is_math_bif(atan2, 2) -> true;
diff --git a/lib/compiler/src/beam_utils.erl b/lib/compiler/src/beam_utils.erl
index e82ba82d38..7704690f86 100644
--- a/lib/compiler/src/beam_utils.erl
+++ b/lib/compiler/src/beam_utils.erl
@@ -26,7 +26,7 @@
code_at/2,bif_to_test/3,is_pure_test/1,
live_opt/1,delete_live_annos/1,combine_heap_needs/2]).
--export([joineven/2,spliteven/1]).
+-export([join_even/2,split_even/1]).
-import(lists, [member/2,sort/1,reverse/1,splitwith/2]).
@@ -187,7 +187,7 @@ is_pure_test({test,is_lt,_,[_,_]}) -> true;
is_pure_test({test,is_nil,_,[_]}) -> true;
is_pure_test({test,is_nonempty_list,_,[_]}) -> true;
is_pure_test({test,test_arity,_,[_,_]}) -> true;
-is_pure_test({test,has_map_fields,_,[_,{list,_}]}) -> true;
+is_pure_test({test,has_map_fields,_,[_|_]}) -> true;
is_pure_test({test,Op,_,Ops}) ->
erl_internal:new_type_test(Op, length(Ops)).
@@ -196,7 +196,7 @@ is_pure_test({test,Op,_,Ops}) ->
%% Go through the instruction sequence in reverse execution
%% order, keep track of liveness and remove 'move' instructions
%% whose destination is a register that will not be used.
-%% Also insert {'%live',Live} annotations at the beginning
+%% Also insert {'%live',Live,Regs} annotations at the beginning
%% and end of each block.
%%
live_opt(Is0) ->
@@ -217,7 +217,7 @@ delete_live_annos([{block,Bl0}|Is]) ->
[] -> delete_live_annos(Is);
[_|_]=Bl -> [{block,Bl}|delete_live_annos(Is)]
end;
-delete_live_annos([{'%live',_}|Is]) ->
+delete_live_annos([{'%live',_,_}|Is]) ->
delete_live_annos(Is);
delete_live_annos([I|Is]) ->
[I|delete_live_annos(Is)];
@@ -366,11 +366,6 @@ check_liveness(R, [{apply,Args}|Is], St) ->
{x,_} -> {killed,St};
{y,_} -> check_liveness(R, Is, St)
end;
-check_liveness({x,R}, [{'%live',Live}|Is], St) ->
- if
- R < Live -> check_liveness(R, Is, St);
- true -> {killed,St}
- end;
check_liveness(R, [{bif,Op,{f,Fail},Ss,D}|Is], St0) ->
case check_liveness_fail(R, Op, Ss, Fail, St0) of
{killed,St} = Killed ->
@@ -554,7 +549,7 @@ check_killed_block(R, [{set,Ds,Ss,_Op}|Is]) ->
false -> check_killed_block(R, Is)
end
end;
-check_killed_block(R, [{'%live',Live}|Is]) ->
+check_killed_block(R, [{'%live',Live,_}|Is]) ->
case R of
{x,X} when X >= Live -> killed;
_ -> check_killed_block(R, Is)
@@ -577,7 +572,7 @@ check_used_block({x,X}=R, [{set,Ds,Ss,{alloc,Live,Op}}|Is], St) ->
end;
check_used_block(R, [{set,Ds,Ss,Op}|Is], St) ->
check_used_block_1(R, Ss, Ds, Op, Is, St);
-check_used_block(R, [{'%live',Live}|Is], St) ->
+check_used_block(R, [{'%live',Live,_}|Is], St) ->
case R of
{x,X} when X >= Live -> {killed,St};
_ -> check_used_block(R, Is, St)
@@ -678,9 +673,9 @@ live_opt([{test,bs_start_match2,Fail,Live,[Src,_],_}=I|Is], _, D, Acc) ->
%% Other instructions.
live_opt([{block,Bl0}|Is], Regs0, D, Acc) ->
- Live0 = {'%live',live_regs(Regs0)},
+ Live0 = {'%live',live_regs(Regs0),Regs0},
{Bl,Regs} = live_opt_block(reverse(Bl0), Regs0, D, [Live0]),
- Live = {'%live',live_regs(Regs)},
+ Live = {'%live',live_regs(Regs),Regs},
live_opt(Is, Regs, D, [{block,[Live|Bl]}|Acc]);
live_opt([{label,L}=I|Is], Regs, D0, Acc) ->
D = gb_trees:insert(L, Regs, D0),
@@ -758,13 +753,9 @@ live_opt([{line,_}=I|Is], Regs, D, Acc) ->
live_opt(Is, Regs, D, [I|Acc]);
%% The following instructions can occur if the "compilation" has been
-%% started from a .S file using the 'asm' option.
+%% started from a .S file using the 'from_asm' option.
live_opt([{trim,_,_}=I|Is], Regs, D, Acc) ->
live_opt(Is, Regs, D, [I|Acc]);
-live_opt([{allocate,_,Live}=I|Is], _, D, Acc) ->
- live_opt(Is, live_call(Live), D, [I|Acc]);
-live_opt([{allocate_heap,_,_,Live}=I|Is], _, D, Acc) ->
- live_opt(Is, live_call(Live), D, [I|Acc]);
live_opt([{'%',_}=I|Is], Regs, D, Acc) ->
live_opt(Is, Regs, D, [I|Acc]);
live_opt([{recv_set,_}=I|Is], Regs, D, Acc) ->
@@ -835,14 +826,14 @@ x_live([], Regs) -> Regs.
is_live(X, Regs) -> ((Regs bsr X) band 1) =:= 1.
-%% spliteven/1
+%% split_even/1
%% [1,2,3,4,5,6] -> {[1,3,5],[2,4,6]}
-spliteven(Rs) -> spliteven(Rs,[],[]).
-spliteven([],Ss,Ds) -> {reverse(Ss),reverse(Ds)};
-spliteven([S,D|Rs],Ss,Ds) ->
- spliteven(Rs,[S|Ss],[D|Ds]).
+split_even(Rs) -> split_even(Rs,[],[]).
+split_even([],Ss,Ds) -> {reverse(Ss),reverse(Ds)};
+split_even([S,D|Rs],Ss,Ds) ->
+ split_even(Rs,[S|Ss],[D|Ds]).
-%% joineven/1
+%% join_even/1
%% {[1,3,5],[2,4,6]} -> [1,2,3,4,5,6]
-joineven([],[]) -> [];
-joineven([S|Ss],[D|Ds]) -> [S,D|joineven(Ss,Ds)].
+join_even([],[]) -> [];
+join_even([S|Ss],[D|Ds]) -> [S,D|join_even(Ss,Ds)].
diff --git a/lib/compiler/src/beam_validator.erl b/lib/compiler/src/beam_validator.erl
index 0acc7a227f..4d4536b79c 100644
--- a/lib/compiler/src/beam_validator.erl
+++ b/lib/compiler/src/beam_validator.erl
@@ -22,7 +22,6 @@
%% Avoid warning for local function error/1 clashing with autoimported BIF.
-compile({no_auto_import,[error/1]}).
--export([file/1, files/1]).
%% Interface for compiler.
-export([module/2, format_error/1]).
@@ -40,38 +39,12 @@
-define(DBG_FORMAT(F, D), ok).
-endif.
-%%%
-%%% API functions.
-%%%
-
--spec file(file:filename()) -> 'ok' | {'error', term()}.
-
-file(Name) when is_list(Name) ->
- case case filename:extension(Name) of
- ".S" -> s_file(Name);
- ".beam" -> beam_file(Name)
- end of
- [] -> ok;
- Es -> {error,Es}
- end.
-
--spec files([file:filename()]) -> 'ok'.
-
-files([F|Fs]) ->
- ?DBG_FORMAT("# Verifying: ~p~n", [F]),
- case file(F) of
- ok -> ok;
- {error,Es} ->
- io:format("~tp:~n~ts~n", [F,format_error(Es)])
- end,
- files(Fs);
-files([]) -> ok.
-
%% To be called by the compiler.
module({Mod,Exp,Attr,Fs,Lc}=Code, _Opts)
when is_atom(Mod), is_list(Exp), is_list(Attr), is_integer(Lc) ->
case validate(Mod, Fs) of
- [] -> {ok,Code};
+ [] ->
+ {ok,Code};
Es0 ->
Es = [{?MODULE,E} || E <- Es0],
{error,[{atom_to_list(Mod),Es}]}
@@ -79,12 +52,6 @@ module({Mod,Exp,Attr,Fs,Lc}=Code, _Opts)
-spec format_error(term()) -> iolist().
-format_error([]) -> [];
-format_error([{{M,F,A},{I,Off,Desc}}|Es]) ->
- [io_lib:format(" ~p:~p/~p+~p:~n ~p - ~p~n",
- [M,F,A,Off,I,Desc])|format_error(Es)];
-format_error([Error|Es]) ->
- [format_error(Error)|format_error(Es)];
format_error({{_M,F,A},{I,Off,limit}}) ->
io_lib:format(
"function ~p/~p+~p:~n"
@@ -103,8 +70,6 @@ format_error({{_M,F,A},{I,Off,Desc}}) ->
" Internal consistency check failed - please report this bug.~n"
" Instruction: ~p~n"
" Error: ~p:~n", [F,A,Off,I,Desc]);
-format_error({Module,Error}) ->
- [Module:format_error(Error)];
format_error(Error) ->
io_lib:format("~p~n", [Error]).
@@ -112,36 +77,6 @@ format_error(Error) ->
%%% Local functions follow.
%%%
-s_file(Name) ->
- {ok,Is} = file:consult(Name),
- {module,Module} = lists:keyfind(module, 1, Is),
- Fs = find_functions(Is),
- validate(Module, Fs).
-
-find_functions(Fs) ->
- find_functions_1(Fs, none, [], []).
-
-find_functions_1([{function,Name,Arity,Entry}|Is], Func, FuncAcc, Acc0) ->
- Acc = add_func(Func, FuncAcc, Acc0),
- find_functions_1(Is, {Name,Arity,Entry}, [], Acc);
-find_functions_1([I|Is], Func, FuncAcc, Acc) ->
- find_functions_1(Is, Func, [I|FuncAcc], Acc);
-find_functions_1([], Func, FuncAcc, Acc) ->
- reverse(add_func(Func, FuncAcc, Acc)).
-
-add_func(none, _, Acc) -> Acc;
-add_func({Name,Arity,Entry}, Is, Acc) ->
- [{function,Name,Arity,Entry,reverse(Is)}|Acc].
-
-beam_file(Name) ->
- try beam_disasm:file(Name) of
- {error,beam_lib,Reason} -> [{beam_lib,Reason}];
- #beam_file{module=Module, code=Code0} ->
- Code = normalize_disassembled_code(Code0),
- validate(Module, Code)
- catch _:_ -> [disassembly_failed]
- end.
-
%%%
%%% The validator follows.
%%%
@@ -196,23 +131,16 @@ validate_0(Module, [{function,Name,Ar,Entry,Code}|Fs], Ft) ->
try validate_1(Code, Name, Ar, Entry, Ft) of
_ -> validate_0(Module, Fs, Ft)
catch
- Error ->
+ throw:Error ->
+ %% Controlled error.
[Error|validate_0(Module, Fs, Ft)];
- error:Error ->
- [validate_error(Error, Module, Name, Ar)|validate_0(Module, Fs, Ft)]
+ Class:Error ->
+ %% Crash.
+ Stack = erlang:get_stacktrace(),
+ io:fwrite("Function: ~w/~w\n", [Name,Ar]),
+ erlang:raise(Class, Error, Stack)
end.
--ifdef(DEBUG).
-validate_error(Error, Module, Name, Ar) ->
- exit(validate_error_1(Error, Module, Name, Ar)).
--else.
-validate_error(Error, Module, Name, Ar) ->
- validate_error_1(Error, Module, Name, Ar).
--endif.
-validate_error_1(Error, Module, Name, Ar) ->
- {{Module,Name,Ar},
- {internal_error,'_',{Error,erlang:get_stacktrace()}}}.
-
-type index() :: non_neg_integer().
-type reg_tab() :: gb_trees:tree(index(), 'none' | {'value', _}).
@@ -225,7 +153,6 @@ validate_error_1(Error, Module, Name, Ar) ->
hf=0, %Available heap size for floats.
fls=undefined, %Floating point state.
ct=[], %List of hot catch/try labels
- bsm=undefined, %Bit syntax matching state.
bits=undefined, %Number of bits in bit syntax binary.
setelem=false %Previous instruction was setelement/3.
}).
@@ -308,7 +235,7 @@ labels_1([{label,L}|Is], R) ->
labels_1([{line,_}|Is], R) ->
labels_1(Is, R);
labels_1(Is, R) ->
- {lists:reverse(R),Is}.
+ {reverse(R),Is}.
init_state(Arity) ->
Xs = init_regs(Arity, term),
@@ -403,10 +330,6 @@ valfun_1({init,{y,_}=Reg}, Vst) ->
set_type_y(initialized, Reg, Vst);
valfun_1({test_heap,Heap,Live}, Vst) ->
test_heap(Heap, Live, Vst);
-valfun_1({bif,_Op,nofail,Src,Dst}, Vst) ->
- %% The 'nofail' atom only occurs in disassembled code.
- validate_src(Src, Vst),
- set_type_reg(term, Dst, Vst);
valfun_1({bif,Op,{f,_},Src,Dst}=I, Vst) ->
case is_bif_safe(Op, length(Src)) of
false ->
@@ -432,18 +355,12 @@ valfun_1({put_tuple,Sz,Dst}, Vst0) when is_integer(Sz) ->
valfun_1({put,Src}, Vst) ->
assert_term(Src, Vst),
eat_heap(1, Vst);
-valfun_1({put_string,Sz,_,Dst}, Vst0) when is_integer(Sz) ->
- Vst = eat_heap(2*Sz, Vst0),
- set_type_reg(cons, Dst, Vst);
%% Instructions for optimization of selective receives.
valfun_1({recv_mark,{f,Fail}}, Vst) when is_integer(Fail) ->
Vst;
valfun_1({recv_set,{f,Fail}}, Vst) when is_integer(Fail) ->
Vst;
%% Misc.
-valfun_1({'%live',Live}, Vst) ->
- verify_live(Live, Vst),
- Vst;
valfun_1(remove_message, Vst) ->
Vst;
valfun_1({'%',_}, Vst) ->
@@ -602,8 +519,6 @@ valfun_4({call_ext_last,Live,Func,StkSize},
tail_call(Func, Live, Vst);
valfun_4({call_ext_last,_,_,_}, #vst{current=#st{numy=NumY}}) ->
error({allocated,NumY});
-valfun_4({make_fun,_,_,Live}, Vst) ->
- call('fun', Live, Vst);
valfun_4({make_fun2,_,_,_,Live}, Vst) ->
call(make_fun, Live, Vst);
%% Other BIFs
@@ -620,8 +535,6 @@ valfun_4({bif,element,{f,Fail},[Pos,Tuple],Dst}, Vst0) ->
TupleType = upgrade_tuple_type({tuple,[get_tuple_size(PosType)]}, TupleType0),
Vst = set_type(TupleType, Tuple, Vst1),
set_type_reg(term, Dst, Vst);
-valfun_4({raise,{f,_}=Fail,Src,Dst}, Vst) ->
- valfun_4({bif,raise,Fail,Src,Dst}, Vst);
valfun_4({bif,Op,{f,Fail},Src,Dst}, Vst0) ->
validate_src(Src, Vst0),
Vst = branch_state(Fail, Vst0),
@@ -738,32 +651,6 @@ valfun_4({bs_save2,Ctx,SavePoint}, Vst) ->
valfun_4({bs_restore2,Ctx,SavePoint}, Vst) ->
bsm_restore(Ctx, SavePoint, Vst);
-%% Bit syntax instructions.
-valfun_4({bs_start_match,{f,_Fail}=F,Src}, Vst) ->
- valfun_4({test,bs_start_match,F,[Src]}, Vst);
-valfun_4({test,bs_start_match,{f,Fail},[Src]}, Vst) ->
- assert_term(Src, Vst),
- bs_start_match(branch_state(Fail, Vst));
-
-valfun_4({bs_save,SavePoint}, Vst) ->
- bs_assert_state(Vst),
- bs_save(SavePoint, Vst);
-valfun_4({bs_restore,SavePoint}, Vst) ->
- bs_assert_state(Vst),
- bs_assert_savepoint(SavePoint, Vst),
- Vst;
-valfun_4({test,bs_skip_bits,{f,Fail},[Src,_,_]}, Vst) ->
- bs_assert_state(Vst),
- assert_term(Src, Vst),
- branch_state(Fail, Vst);
-valfun_4({test,bs_test_tail,{f,Fail},_}, Vst) ->
- bs_assert_state(Vst),
- branch_state(Fail, Vst);
-valfun_4({test,_,{f,Fail},[_,_,_,Dst]}, Vst0) ->
- bs_assert_state(Vst0),
- Vst = branch_state(Fail, Vst0),
- set_type_reg({integer,[]}, Dst, Vst);
-
%% Other test instructions.
valfun_4({test,is_float,{f,Lbl},[Float]}, Vst) ->
assert_term(Float, Vst),
@@ -779,9 +666,17 @@ valfun_4({test,test_arity,{f,Lbl},[Tuple,Sz]}, Vst) when is_integer(Sz) ->
assert_type(tuple, Tuple, Vst),
set_type_reg({tuple,Sz}, Tuple, branch_state(Lbl, Vst));
valfun_4({test,has_map_fields,{f,Lbl},Src,{list,List}}, Vst) ->
- validate_src([Src], Vst),
+ assert_type(map, Src, Vst),
assert_strict_literal_termorder(List),
branch_state(Lbl, Vst);
+valfun_4({test,is_map,{f,Lbl},[Src]}, Vst0) ->
+ Vst = branch_state(Lbl, Vst0),
+ case Src of
+ {Tag,_} when Tag =:= x; Tag =:= y ->
+ set_type_reg(map, Src, Vst);
+ _ ->
+ Vst
+ end;
valfun_4({test,_Op,{f,Lbl},Src}, Vst) ->
validate_src(Src, Vst),
branch_state(Lbl, Vst);
@@ -795,9 +690,6 @@ valfun_4({bs_utf8_size,{f,Fail},A,Dst}, Vst) ->
valfun_4({bs_utf16_size,{f,Fail},A,Dst}, Vst) ->
assert_term(A, Vst),
set_type_reg({integer,[]}, Dst, branch_state(Fail, Vst));
-valfun_4({bs_bits_to_bytes,{f,Fail},Src,Dst}, Vst) ->
- assert_term(Src, Vst),
- set_type_reg({integer,[]}, Dst, branch_state(Fail, Vst));
valfun_4({bs_init2,{f,Fail},Sz,Heap,Live,_,Dst}, Vst0) ->
verify_live(Live, Vst0),
if
@@ -868,16 +760,6 @@ valfun_4({bs_put_utf32,{f,Fail},_,Src}=I, Vst0) ->
assert_term(Src, Vst0),
Vst = bs_align_check(I, Vst0),
branch_state(Fail, Vst);
-%% Old bit syntax construction (before R10B).
-valfun_4({bs_init,_,_}, Vst) ->
- bs_zero_bits(Vst);
-valfun_4({bs_need_buf,_}, Vst) -> Vst;
-valfun_4({bs_final,{f,Fail},Dst}, Vst0) ->
- Vst = branch_state(Fail, Vst0),
- set_type_reg(binary, Dst, Vst);
-valfun_4({bs_final2,Src,Dst}, Vst0) ->
- assert_term(Src, Vst0),
- set_type_reg(binary, Dst, Vst0);
%% Map instructions.
valfun_4({put_map_assoc,{f,Fail},Src,Dst,Live,{list,List}}, Vst) ->
verify_put_map(Fail, Src, Dst, Live, List, Vst);
@@ -889,26 +771,30 @@ valfun_4(_, _) ->
error(unknown_instruction).
verify_get_map(Fail, Src, List, Vst0) ->
- assert_term(Src, Vst0),
+ assert_type(map, Src, Vst0),
Vst1 = branch_state(Fail, Vst0),
- Lits = mmap(fun(L,_R) -> [L] end, List),
- assert_strict_literal_termorder(Lits),
+ Keys = extract_map_keys(List),
+ assert_strict_literal_termorder(Keys),
verify_get_map_pair(List,Vst0,Vst1).
+extract_map_keys([Key,_Val|T]) ->
+ [Key|extract_map_keys(T)];
+extract_map_keys([]) -> [].
+
verify_get_map_pair([],_,Vst) -> Vst;
verify_get_map_pair([Src,Dst|Vs],Vst0,Vsti) ->
assert_term(Src, Vst0),
verify_get_map_pair(Vs,Vst0,set_type_reg(term,Dst,Vsti)).
verify_put_map(Fail, Src, Dst, Live, List, Vst0) ->
+ assert_type(map, Src, Vst0),
verify_live(Live, Vst0),
verify_y_init(Vst0),
foreach(fun (Term) -> assert_term(Term, Vst0) end, List),
- assert_term(Src, Vst0),
Vst1 = heap_alloc(0, Vst0),
Vst2 = branch_state(Fail, Vst1),
Vst = prune_x_regs(Live, Vst2),
- set_type_reg(term, Dst, Vst).
+ set_type_reg(map, Dst, Vst).
%%
%% Common code for validating bs_get* instructions.
@@ -936,9 +822,6 @@ validate_bs_skip_utf(Fail, Ctx, Live, Vst0) ->
%%
val_dsetel({move,_,_}, Vst) ->
Vst;
-val_dsetel({put_string,0,{string,""},_}, Vst) ->
- %% An empty string is OK since it doesn't build anything.
- Vst;
val_dsetel({call_ext,3,{extfunc,erlang,setelement,3}}, #vst{current=St}=Vst) ->
Vst#vst{current=St#st{setelem=true}};
val_dsetel({set_tuple_element,_,_,_}, #vst{current=#st{setelem=false}}) ->
@@ -972,7 +855,7 @@ call(Name, Live, #vst{current=St}=Vst) ->
Type when Type =/= exception ->
%% Type is never 'exception' because it has been handled earlier.
Xs = gb_trees_from_list([{0,Type}]),
- Vst#vst{current=St#st{x=Xs,f=init_fregs(),bsm=undefined}}
+ Vst#vst{current=St#st{x=Xs,f=init_fregs()}}
end.
%% Tail call.
@@ -1030,7 +913,7 @@ allocate(_, _, _, _, #vst{current=#st{numy=Numy}}) ->
error({existing_stack_frame,{size,Numy}}).
deallocate(#vst{current=St}=Vst) ->
- Vst#vst{current=St#st{y=init_regs(0, initialized),numy=none,bsm=undefined}}.
+ Vst#vst{current=St#st{y=init_regs(0, initialized),numy=none}}.
test_heap(Heap, Live, Vst0) ->
verify_live(Live, Vst0),
@@ -1038,7 +921,7 @@ test_heap(Heap, Live, Vst0) ->
heap_alloc(Heap, Vst).
heap_alloc(Heap, #vst{current=St0}=Vst) ->
- St1 = kill_heap_allocation(St0#st{bsm=undefined}),
+ St1 = kill_heap_allocation(St0),
St = heap_alloc_1(Heap, St1),
Vst#vst{current=St}.
@@ -1122,74 +1005,30 @@ assert_freg_set(Fr, _) -> error({bad_source,Fr}).
%%% Maps
-%% ensure that a list of literals has a strict
-%% ascending term order (also meaning unique literals).
-%% Single item lists may have registers.
-assert_strict_literal_termorder([_]) -> ok;
-assert_strict_literal_termorder(Ls) ->
- Vs = lists:map(fun (L) -> get_literal(L) end, Ls),
+%% A single-item list may contain either a literal or a register.
+%%
+%% A list with more than one item must contain literals in
+%% ascending term order.
+%%
+%% An empty list is not allowed.
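+%%
+%% A few hypothetical examples (key lists invented here, using the
+%% usual typed operands such as {atom,a} and {integer,1}):
+%%
+%%   [{integer,1},{atom,a}] is accepted (1 sorts before 'a'),
+%%   [{atom,a},{integer,1}] is rejected (not_strict_order), and
+%%   [{x,0}] is accepted, since a single key may be a register.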
+
+assert_strict_literal_termorder([]) ->
+ %% There is no reason to use the get_map_elements and
+ %% has_map_fields instructions with empty lists.
+ error(empty_field_list);
+assert_strict_literal_termorder([_]) ->
+ ok;
+assert_strict_literal_termorder([_,_|_]=Ls) ->
+ Vs = [get_literal(L) || L <- Ls],
case check_strict_value_termorder(Vs) of
true -> ok;
- false -> error({not_strict_order, Ls})
+ false -> error(not_strict_order)
end.
-%% usage:
-%% mmap(fun(A,B) -> [{A,B}] end, [1,2,3,4]),
-%% [{1,2},{3,4}]
-
-mmap(F,List) ->
- {arity,Ar} = erlang:fun_info(F,arity),
- mmap(F,Ar,List).
-mmap(_F,_,[]) -> [];
-mmap(F,Ar,List) ->
- {Hd,Tl} = lists:split(Ar,List),
- apply(F,Hd) ++ mmap(F,Ar,Tl).
-
-check_strict_value_termorder([]) -> true;
-check_strict_value_termorder([_]) -> true;
-check_strict_value_termorder([V1,V2]) ->
- erts_internal:cmp_term(V1,V2) < 0;
-check_strict_value_termorder([V1,V2|Vs]) ->
- case erts_internal:cmp_term(V1,V2) < 0 of
- true -> check_strict_value_termorder([V2|Vs]);
- false -> false
- end.
-
-%%%
-%%% Binary matching.
-%%%
-%%% Possible values for the bsm field (=bit syntax matching state).
-%%%
-%%% undefined - Undefined (initial state). No matching instructions allowed.
-%%%
-%%% (gb set) - The gb set contains the defined save points.
-%%%
-%%% The bsm field is reset to 'undefined' by instructions that may cause a
-%%% a garbage collection (might move the binary) and/or context switch
-%%% (may invalidate the save points).
-
-bs_start_match(#vst{current=#st{bsm=undefined}=St}=Vst) ->
- Vst#vst{current=St#st{bsm=gb_sets:empty()}};
-bs_start_match(Vst) ->
- %% Must retain save points here - it is possible to restore back
- %% to a previous binary.
- Vst.
-
-bs_save(Reg, #vst{current=#st{bsm=Saved}=St}=Vst)
- when is_integer(Reg), Reg < ?MAXREG ->
- Vst#vst{current=St#st{bsm=gb_sets:add(Reg, Saved)}};
-bs_save(_, _) -> error(limit).
-
-bs_assert_savepoint(Reg, #vst{current=#st{bsm=Saved}}) ->
- case gb_sets:is_member(Reg, Saved) of
- false -> error({no_save_point,Reg});
- true -> ok
- end.
-
-bs_assert_state(#vst{current=#st{bsm=undefined}}) ->
- error(no_bs_match_state);
-bs_assert_state(_) -> ok.
-
+check_strict_value_termorder([V1|[V2|_]=Vs]) ->
+ erts_internal:cmp_term(V1, V2) < 0 andalso
+ check_strict_value_termorder(Vs);
+check_strict_value_termorder([_]) -> true.
%%%
%%% New binary matching instructions.
@@ -1389,7 +1228,8 @@ assert_term(Src, Vst) ->
%%
%% number Integer or Float of unknown value
%%
-
+%% map Map.
+%%
assert_type(WantedType, Term, Vst) ->
assert_type(WantedType, get_term_type(Term, Vst)).
@@ -1471,6 +1311,7 @@ get_term_type_1(nil=T, _) -> T;
get_term_type_1({atom,A}=T, _) when is_atom(A) -> T;
get_term_type_1({float,F}=T, _) when is_float(F) -> T;
get_term_type_1({integer,I}=T, _) when is_integer(I) -> T;
+get_term_type_1({literal,Map}, _) when is_map(Map) -> map;
get_term_type_1({literal,_}=T, _) -> T;
get_term_type_1({x,X}=Reg, #vst{current=#st{x=Xs}}) when is_integer(X) ->
case gb_trees:lookup(X, Xs) of
@@ -1525,14 +1366,13 @@ merge_states(L, St, Branched) when L =/= 0 ->
{value,OtherSt} -> merge_states_1(St, OtherSt)
end.
-merge_states_1(#st{x=Xs0,y=Ys0,numy=NumY0,h=H0,ct=Ct0,bsm=Bsm0}=St,
- #st{x=Xs1,y=Ys1,numy=NumY1,h=H1,ct=Ct1,bsm=Bsm1}) ->
+merge_states_1(#st{x=Xs0,y=Ys0,numy=NumY0,h=H0,ct=Ct0}=St,
+ #st{x=Xs1,y=Ys1,numy=NumY1,h=H1,ct=Ct1}) ->
NumY = merge_stk(NumY0, NumY1),
Xs = merge_regs(Xs0, Xs1),
Ys = merge_y_regs(Ys0, Ys1),
Ct = merge_ct(Ct0, Ct1),
- Bsm = merge_bsm(Bsm0, Bsm1),
- St#st{x=Xs,y=Ys,numy=NumY,h=min(H0, H1),ct=Ct,bsm=Bsm}.
+ St#st{x=Xs,y=Ys,numy=NumY,h=min(H0, H1),ct=Ct}.
merge_stk(S, S) -> S;
merge_stk(_, _) -> undecided.
@@ -1615,10 +1455,6 @@ merge_types(T1, T2) when T1 =/= T2 ->
%% Too different. All we know is that the type is a 'term'.
term.
-merge_bsm(undefined, _) -> undefined;
-merge_bsm(_, undefined) -> undefined;
-merge_bsm(Bsm0, Bsm1) -> gb_sets:intersection(Bsm0, Bsm1).
-
tuple_sz([Sz]) -> Sz;
tuple_sz(Sz) -> Sz.
@@ -1725,6 +1561,7 @@ bif_type(is_float, [_], _) -> bool;
bif_type(is_function, [_], _) -> bool;
bif_type(is_integer, [_], _) -> bool;
bif_type(is_list, [_], _) -> bool;
+bif_type(is_map, [_], _) -> bool;
bif_type(is_number, [_], _) -> bool;
bif_type(is_pid, [_], _) -> bool;
bif_type(is_port, [_], _) -> bool;
@@ -1754,6 +1591,7 @@ is_bif_safe(is_float, 1) -> true;
is_bif_safe(is_function, 1) -> true;
is_bif_safe(is_integer, 1) -> true;
is_bif_safe(is_list, 1) -> true;
+is_bif_safe(is_map, 1) -> true;
is_bif_safe(is_number, 1) -> true;
is_bif_safe(is_pid, 1) -> true;
is_bif_safe(is_port, 1) -> true;
@@ -1818,6 +1656,7 @@ return_type_math(erf, 1) -> {float,[]};
return_type_math(erfc, 1) -> {float,[]};
return_type_math(exp, 1) -> {float,[]};
return_type_math(log, 1) -> {float,[]};
+return_type_math(log2, 1) -> {float,[]};
return_type_math(log10, 1) -> {float,[]};
return_type_math(sqrt, 1) -> {float,[]};
return_type_math(atan2, 2) -> {float,[]};
@@ -1839,52 +1678,3 @@ error(Error) -> exit(Error).
-else.
error(Error) -> throw(Error).
-endif.
-
-
-%%%
-%%% Rewrite disassembled code to the same format as we used internally
-%%% to not have to worry later.
-%%%
-
-normalize_disassembled_code(Fs) ->
- Index = ndc_index(Fs, []),
- ndc(Fs, Index, []).
-
-ndc_index([{function,Name,Arity,Entry,_Code}|Fs], Acc) ->
- ndc_index(Fs, [{{Name,Arity},Entry}|Acc]);
-ndc_index([], Acc) ->
- gb_trees:from_orddict(lists:sort(Acc)).
-
-ndc([{function,Name,Arity,Entry,Code0}|Fs], D, Acc) ->
- Code = ndc_1(Code0, D, []),
- ndc(Fs, D, [{function,Name,Arity,Entry,Code}|Acc]);
-ndc([], _, Acc) -> reverse(Acc).
-
-ndc_1([{call=Op,A,{_,F,A}}|Is], D, Acc) ->
- ndc_1(Is, D, [{Op,A,{f,gb_trees:get({F,A}, D)}}|Acc]);
-ndc_1([{call_only=Op,A,{_,F,A}}|Is], D, Acc) ->
- ndc_1(Is, D, [{Op,A,{f,gb_trees:get({F,A}, D)}}|Acc]);
-ndc_1([{call_last=Op,A,{_,F,A},Sz}|Is], D, Acc) ->
- ndc_1(Is, D, [{Op,A,{f,gb_trees:get({F,A}, D)},Sz}|Acc]);
-ndc_1([{arithbif,Op,F,Src,Dst}|Is], D, Acc) ->
- ndc_1(Is, D, [{bif,Op,F,Src,Dst}|Acc]);
-ndc_1([{arithfbif,Op,F,Src,Dst}|Is], D, Acc) ->
- ndc_1(Is, D, [{bif,Op,F,Src,Dst}|Acc]);
-ndc_1([{test,bs_start_match2=Op,F,[A1,Live,A3,Dst]}|Is], D, Acc) ->
- ndc_1(Is, D, [{test,Op,F,Live,[A1,A3],Dst}|Acc]);
-ndc_1([{test,bs_get_binary2=Op,F,[A1,Live,A3,A4,A5,Dst]}|Is], D, Acc) ->
- ndc_1(Is, D, [{test,Op,F,Live,[A1,A3,A4,A5],Dst}|Acc]);
-ndc_1([{test,bs_get_float2=Op,F,[A1,Live,A3,A4,A5,Dst]}|Is], D, Acc) ->
- ndc_1(Is, D, [{test,Op,F,Live,[A1,A3,A4,A5],Dst}|Acc]);
-ndc_1([{test,bs_get_integer2=Op,F,[A1,Live,A3,A4,A5,Dst]}|Is], D, Acc) ->
- ndc_1(Is, D, [{test,Op,F,Live,[A1,A3,A4,A5],Dst}|Acc]);
-ndc_1([{test,bs_get_utf8=Op,F,[A1,Live,A3,Dst]}|Is], D, Acc) ->
- ndc_1(Is, D, [{test,Op,F,Live,[A1,A3],Dst}|Acc]);
-ndc_1([{test,bs_get_utf16=Op,F,[A1,Live,A3,Dst]}|Is], D, Acc) ->
- ndc_1(Is, D, [{test,Op,F,Live,[A1,A3],Dst}|Acc]);
-ndc_1([{test,bs_get_utf32=Op,F,[A1,Live,A3,Dst]}|Is], D, Acc) ->
- ndc_1(Is, D, [{test,Op,F,Live,[A1,A3],Dst}|Acc]);
-ndc_1([I|Is], D, Acc) ->
- ndc_1(Is, D, [I|Acc]);
-ndc_1([], _, Acc) ->
- reverse(Acc).
diff --git a/lib/compiler/src/beam_z.erl b/lib/compiler/src/beam_z.erl
index c2a6ef604e..0c7bef9183 100644
--- a/lib/compiler/src/beam_z.erl
+++ b/lib/compiler/src/beam_z.erl
@@ -79,17 +79,9 @@ undo_rename({put_map,Fail,assoc,S,D,R,L}) ->
undo_rename({put_map,Fail,exact,S,D,R,L}) ->
{put_map_exact,Fail,S,D,R,L};
undo_rename({test,has_map_fields,Fail,[Src|List]}) ->
- {test,has_map_fields,Fail,Src,{list,[to_typed_literal(V)||V<-List]}};
-undo_rename({get_map_elements,Fail,Src,{list, List}}) ->
- {get_map_elements,Fail,Src,{list,[to_typed_literal(V)||V<-List]}};
+ {test,has_map_fields,Fail,Src,{list,List}};
+undo_rename({get_map_elements,Fail,Src,{list,List}}) ->
+ {get_map_elements,Fail,Src,{list,List}};
undo_rename({select,I,Reg,Fail,List}) ->
{I,Reg,Fail,{list,List}};
undo_rename(I) -> I.
-
-%% to_typed_literal(Arg)
-%% transform Arg to specific literal i.e. float | integer | atom if applicable
-to_typed_literal({literal, V}) when is_float(V) -> {float, V};
-to_typed_literal({literal, V}) when is_atom(V) -> {atom, V};
-to_typed_literal({literal, V}) when is_integer(V) -> {integer, V};
-to_typed_literal({literal, []}) -> nil;
-to_typed_literal(V) -> V.
diff --git a/lib/compiler/src/cerl.erl b/lib/compiler/src/cerl.erl
index 7a2c3d70de..3d4b9ee0c6 100644
--- a/lib/compiler/src/cerl.erl
+++ b/lib/compiler/src/cerl.erl
@@ -123,11 +123,14 @@
bitstr_flags/1,
%% keep map exports here for now
+ c_map_pattern/1,
+ is_c_map/1,
map_es/1,
map_arg/1,
update_c_map/3,
c_map/1, is_c_map_empty/1,
ann_c_map/2, ann_c_map/3,
+ ann_c_map_pattern/2,
map_pair_op/1,map_pair_key/1,map_pair_val/1,
update_c_map_pair/4,
c_map_pair/2,
@@ -431,6 +434,8 @@ is_literal_term([H | T]) ->
is_literal_term(T) when is_tuple(T) ->
is_literal_term_list(tuple_to_list(T));
is_literal_term(B) when is_bitstring(B) -> true;
+is_literal_term(M) when is_map(M) ->
+ is_literal_term_list(maps:to_list(M));
is_literal_term(_) ->
false.
@@ -1577,6 +1582,20 @@ ann_make_list(_, [], Node) ->
%% ---------------------------------------------------------------------
%% maps
+%% @spec is_c_map(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% map constructor, otherwise <code>false</code>.
+
+-spec is_c_map(cerl()) -> boolean().
+
+is_c_map(#c_map{}) ->
+ true;
+is_c_map(#c_literal{val = V}) when is_map(V) ->
+ true;
+is_c_map(_) ->
+ false.
+
-spec map_es(c_map()) -> [c_map_pair()].
map_es(#c_map{es = Es}) ->
@@ -1590,7 +1609,17 @@ map_arg(#c_map{arg=M}) ->
-spec c_map([c_map_pair()]) -> c_map().
c_map(Pairs) ->
- #c_map{es=Pairs}.
+ ann_c_map([], Pairs).
+
+-spec c_map_pattern([c_map_pair()]) -> c_map().
+
+c_map_pattern(Pairs) ->
+ #c_map{es=Pairs, is_pat=true}.
+
+-spec ann_c_map_pattern([term()], [c_map_pair()]) -> c_map().
+
+ann_c_map_pattern(As, Pairs) ->
+ #c_map{anno=As, es=Pairs, is_pat=true}.
-spec is_c_map_empty(c_map() | c_literal()) -> boolean().
@@ -1598,25 +1627,13 @@ is_c_map_empty(#c_map{ es=[] }) -> true;
is_c_map_empty(#c_literal{val=M}) when is_map(M),map_size(M) =:= 0 -> true;
is_c_map_empty(_) -> false.
--spec ann_c_map([term()], [cerl()]) -> c_map() | c_literal().
+-spec ann_c_map([term()], [c_map_pair()]) -> c_map() | c_literal().
-ann_c_map(As,Es) ->
+ann_c_map(As, Es) ->
ann_c_map(As, #c_literal{val=#{}}, Es).
-spec ann_c_map([term()], c_map() | c_literal(), [c_map_pair()]) -> c_map() | c_literal().
-ann_c_map(As,#c_literal{val=Mval}=M,Es) when is_map(Mval), map_size(Mval) =:= 0 ->
- Pairs = [[Ck,Cv]||#c_map_pair{key=Ck,val=Cv}<-Es],
- IsLit = lists:foldl(fun(Pair,Res) ->
- Res andalso is_lit_list(Pair)
- end, true, Pairs),
- Fun = fun(Pair) -> [K,V] = lit_list_vals(Pair), {K,V} end,
- case IsLit of
- false ->
- #c_map{arg=M, es=Es, anno=As };
- true ->
- #c_literal{anno=As, val=maps:from_list(lists:map(Fun, Pairs))}
- end;
ann_c_map(As,#c_literal{val=M},Es) when is_map(M) ->
fold_map_pairs(As,Es,M);
ann_c_map(As,M,Es) ->
@@ -1644,14 +1661,14 @@ fold_map_pairs(As,[#c_map_pair{op=#c_literal{val=exact},key=Ck,val=Cv}=E|Es],M)
end;
false ->
#c_map{arg=#c_literal{val=M,anno=As}, es=[E|Es], anno=As }
- end;
-fold_map_pairs(As,Es,M) ->
- #c_map{arg=#c_literal{val=M,anno=As}, es=Es, anno=As }.
+ end.
-%-spec update_c_map(c_map() | c_literal(), [c_map_pair()]) -> c_map() | c_literal().
+-spec update_c_map(c_map(), cerl(), [cerl()]) -> c_map() | c_literal().
-update_c_map(Old,M,Es) ->
- #c_map{arg=M, es = Es, anno = get_ann(Old)}.
+update_c_map(#c_map{is_pat=true}=Old, M, Es) ->
+ Old#c_map{arg=M, es=Es};
+update_c_map(#c_map{is_pat=false}=Old, M, Es) ->
+ ann_c_map(get_ann(Old), M, Es).
map_pair_key(#c_map_pair{key=K}) -> K.
map_pair_val(#c_map_pair{val=V}) -> V.
diff --git a/lib/compiler/src/cerl_clauses.erl b/lib/compiler/src/cerl_clauses.erl
index 87bd47c08b..ef74c5b76f 100644
--- a/lib/compiler/src/cerl_clauses.erl
+++ b/lib/compiler/src/cerl_clauses.erl
@@ -354,29 +354,29 @@ match(P, E, Bs) ->
{false, Bs}
end
end;
- map ->
- %% The most we can do is to say "definitely no match" if a
- %% map pattern is matched against non-map data.
- case E of
- any ->
- {false, Bs};
- _ ->
- case type(E) of
- literal ->
- case is_map(concrete(E)) of
- false ->
- none;
- true ->
- {false, Bs}
- end;
- cons ->
- none;
- tuple ->
- none;
- _ ->
- {false, Bs}
- end
- end;
+ map ->
+ %% The most we can do is to say "definitely no match" if a
+ %% map pattern is matched against non-map data.
+ case E of
+ any ->
+ {false, Bs};
+ _ ->
+ case type(E) of
+ literal ->
+ case is_map(concrete(E)) of
+ false ->
+ none;
+ true ->
+ {false, Bs}
+ end;
+ cons ->
+ none;
+ tuple ->
+ none;
+ _ ->
+ {false, Bs}
+ end
+ end;
_ ->
match_1(P, E, Bs)
end.
diff --git a/lib/compiler/src/compiler.app.src b/lib/compiler/src/compiler.app.src
index 8f68915f8e..fbaa7a96fe 100644
--- a/lib/compiler/src/compiler.app.src
+++ b/lib/compiler/src/compiler.app.src
@@ -56,6 +56,7 @@
rec_env,
sys_core_dsetel,
sys_core_fold,
+ sys_core_fold_lists,
sys_core_inline,
sys_pre_attributes,
sys_pre_expand,
diff --git a/lib/compiler/src/core_lib.erl b/lib/compiler/src/core_lib.erl
index 2792fd8fa5..66319dbd36 100644
--- a/lib/compiler/src/core_lib.erl
+++ b/lib/compiler/src/core_lib.erl
@@ -20,6 +20,12 @@
-module(core_lib).
+-deprecated({get_anno,1,next_major_release}).
+-deprecated({set_anno,2,next_major_release}).
+-deprecated({is_literal,1,next_major_release}).
+-deprecated({is_literal_list,1,next_major_release}).
+-deprecated({literal_value,1,next_major_release}).
+
-export([get_anno/1,set_anno/2]).
-export([is_literal/1,is_literal_list/1]).
-export([literal_value/1]).
@@ -33,59 +39,27 @@
%%
-spec get_anno(cerl:cerl()) -> term().
-get_anno(C) -> element(2, C).
+get_anno(C) -> cerl:get_ann(C).
-spec set_anno(cerl:cerl(), term()) -> cerl:cerl().
-set_anno(C, A) -> setelement(2, C, A).
+set_anno(C, A) -> cerl:set_ann(C, A).
-spec is_literal(cerl:cerl()) -> boolean().
-is_literal(#c_literal{}) -> true;
-is_literal(#c_cons{hd=H,tl=T}) ->
- is_literal(H) andalso is_literal(T);
-is_literal(#c_tuple{es=Es}) -> is_literal_list(Es);
-is_literal(#c_binary{segments=Es}) -> is_lit_bin(Es);
-is_literal(_) -> false.
+is_literal(Cerl) ->
+ cerl:is_literal(cerl:fold_literal(Cerl)).
-spec is_literal_list([cerl:cerl()]) -> boolean().
is_literal_list(Es) -> lists:all(fun is_literal/1, Es).
-is_lit_bin(Es) ->
- lists:all(fun (#c_bitstr{val=E,size=S}) ->
- is_literal(E) andalso is_literal(S)
- end, Es).
-
%% Return the value of LitExpr.
-spec literal_value(cerl:c_literal() | cerl:c_binary() |
cerl:c_map() | cerl:c_cons() | cerl:c_tuple()) -> term().
-literal_value(#c_literal{val=V}) -> V;
-literal_value(#c_binary{segments=Es}) ->
- list_to_binary([literal_value_bin(Bit) || Bit <- Es]);
-literal_value(#c_cons{hd=H,tl=T}) ->
- [literal_value(H)|literal_value(T)];
-literal_value(#c_tuple{es=Es}) ->
- list_to_tuple(literal_value_list(Es));
-literal_value(#c_map{arg=Cm,es=Cmps}) ->
- M = literal_value(Cm),
- lists:foldl(fun(#c_map_pair{ key=Ck, val=Cv },Mi) ->
- K = literal_value(Ck),
- V = literal_value(Cv),
- maps:put(K,V,Mi)
- end, M, Cmps).
-
-literal_value_list(Vals) -> [literal_value(V) || V <- Vals].
-
-literal_value_bin(#c_bitstr{val=Val,size=Sz,unit=U,type=T,flags=Fs}) ->
- %% We will only handle literals constructed by make_literal/1.
- %% Could be made more general in the future if the need arises.
- 8 = literal_value(Sz),
- 1 = literal_value(U),
- integer = literal_value(T),
- [unsigned,big] = literal_value(Fs),
- literal_value(Val).
+literal_value(Cerl) ->
+ cerl:concrete(cerl:fold_literal(Cerl)).
%% Make a suitable values structure, expr or values, depending on Expr.
-spec make_values([cerl:cerl()] | cerl:cerl()) -> cerl:cerl().
@@ -212,6 +186,8 @@ vu_pattern(V, #c_tuple{es=Es}, St) ->
vu_pattern_list(V, Es, St);
vu_pattern(V, #c_binary{segments=Ss}, St) ->
vu_pat_seg_list(V, Ss, St);
+vu_pattern(V, #c_map{es=Es}, St) ->
+ vu_map_pairs(V, Es, St);
vu_pattern(V, #c_alias{var=Var,pat=P}, St0) ->
case vu_pattern(V, Var, St0) of
{true,_}=St1 -> St1;
@@ -234,6 +210,18 @@ vu_pat_seg_list(V, Ss, St) ->
end
end, St, Ss).
+vu_map_pairs(V, [#c_map_pair{key=Key,val=Pat}|T], St0) ->
+ case vu_expr(V, Key) of
+ true ->
+ {true,false};
+ false ->
+ case vu_pattern(V, Pat, St0) of
+ {true,_}=St -> St;
+ St -> vu_map_pairs(V, T, St)
+ end
+ end;
+vu_map_pairs(_, [], St) -> St.
+
-spec vu_var_list(cerl:var_name(), [cerl:c_var()]) -> boolean().
vu_var_list(V, Vs) ->
diff --git a/lib/compiler/src/core_lint.erl b/lib/compiler/src/core_lint.erl
index c0e2bdaba0..f62b2bb0ba 100644
--- a/lib/compiler/src/core_lint.erl
+++ b/lib/compiler/src/core_lint.erl
@@ -173,7 +173,7 @@ check_exports(Es, St) ->
end.
check_attrs(As, St) ->
- case all(fun ({#c_literal{},V}) -> core_lib:is_literal(V);
+ case all(fun ({#c_literal{},#c_literal{}}) -> true;
(_) -> false
end, As) of
true -> St;
diff --git a/lib/compiler/src/core_parse.hrl b/lib/compiler/src/core_parse.hrl
index 4a00535360..7fd4a82e4e 100644
--- a/lib/compiler/src/core_parse.hrl
+++ b/lib/compiler/src/core_parse.hrl
@@ -72,7 +72,8 @@
-record(c_map, {anno=[],
arg=#c_literal{val=#{}} :: cerl:c_var() | cerl:c_literal(),
- es :: [cerl:c_map_pair()]}).
+ es :: [cerl:c_map_pair()],
+ is_pat=false :: boolean()}).
-record(c_map_pair, {anno=[],
op :: #c_literal{val::'assoc'} | #c_literal{val::'exact'},
diff --git a/lib/compiler/src/core_parse.yrl b/lib/compiler/src/core_parse.yrl
index a66ad4235f..eeb9f5dba7 100644
--- a/lib/compiler/src/core_parse.yrl
+++ b/lib/compiler/src/core_parse.yrl
@@ -58,7 +58,8 @@ Terminals
%% Separators
-'(' ')' '{' '}' '[' ']' '|' ',' '->' '=' '/' '<' '>' ':' '-|' '#' '~' '::'
+'(' ')' '{' '}' '[' ']' '|' ',' '->' '=' '/' '<' '>' ':' '-|' '#'
+'~' '=>' ':='
%% Keywords (atoms are assumed to always be single-quoted).
@@ -123,7 +124,7 @@ function_definition ->
{'$1','$3'}.
anno_fun -> '(' fun_expr '-|' annotation ')' :
- core_lib:set_anno('$2', '$4').
+ cerl:set_ann('$2', '$4').
anno_fun -> fun_expr : '$1'.
%% Constant terms for annotations and attributes.
@@ -162,7 +163,7 @@ tail_constant -> ',' constant tail_constant : ['$2'|'$3'].
%% ( ( V -| <anno> ) = ( {a} -| <anno> ) -| <anno> )
anno_pattern -> '(' other_pattern '-|' annotation ')' :
- core_lib:set_anno('$2', '$4').
+ cerl:set_ann('$2', '$4').
anno_pattern -> other_pattern : '$1'.
anno_pattern -> anno_variable : '$1'.
@@ -182,23 +183,24 @@ atomic_pattern -> atomic_literal : '$1'.
tuple_pattern -> '{' '}' : c_tuple([]).
tuple_pattern -> '{' anno_patterns '}' : c_tuple('$2').
-map_pattern -> '~' '{' '}' '~' : #c_map{es=[]}.
+map_pattern -> '~' '{' '}' '~' : c_map_pattern([]).
map_pattern -> '~' '{' map_pair_patterns '}' '~' :
- #c_map{es=lists:sort('$3')}.
+ c_map_pattern(lists:sort('$3')).
map_pair_patterns -> map_pair_pattern : ['$1'].
map_pair_patterns -> map_pair_pattern ',' map_pair_patterns : ['$1' | '$3'].
-map_pair_pattern -> '~' '<' anno_pattern ',' anno_pattern '>' :
- #c_map_pair{op=#c_literal{val=exact},key='$3',val='$5'}.
+map_pair_pattern -> anno_expression ':=' anno_pattern :
+ #c_map_pair{op=#c_literal{val=exact},
+ key='$1',val='$3'}.
cons_pattern -> '[' anno_pattern tail_pattern :
- #c_cons{hd='$2',tl='$3'}.
+ c_cons('$2', '$3').
tail_pattern -> ']' : #c_literal{val=[]}.
tail_pattern -> '|' anno_pattern ']' : '$2'.
tail_pattern -> ',' anno_pattern tail_pattern :
- #c_cons{hd='$2',tl='$3'}.
+ c_cons('$2', '$3').
binary_pattern -> '#' '{' '}' '#' : #c_binary{segments=[]}.
binary_pattern -> '#' '{' segment_patterns '}' '#' : #c_binary{segments='$3'}.
@@ -206,7 +208,7 @@ binary_pattern -> '#' '{' segment_patterns '}' '#' : #c_binary{segments='$3'}.
segment_patterns -> segment_pattern ',' segment_patterns : ['$1' | '$3'].
segment_patterns -> segment_pattern : ['$1'].
-segment_pattern -> '#' '<' anno_pattern '>' '(' anno_patterns ')':
+segment_pattern -> '#' '<' anno_pattern '>' '(' anno_expressions ')':
case '$6' of
[S,U,T,Fs] ->
#c_bitstr{val='$3',size=S,unit=U,type=T,flags=Fs};
@@ -222,7 +224,7 @@ anno_variables -> anno_variable : ['$1'].
anno_variable -> variable : '$1'.
anno_variable -> '(' variable '-|' annotation ')' :
- core_lib:set_anno('$2', '$4').
+ cerl:set_ann('$2', '$4').
%% Expressions
%% Must split expressions into two levels as nested value expressions
@@ -230,7 +232,7 @@ anno_variable -> '(' variable '-|' annotation ')' :
anno_expression -> expression : '$1'.
anno_expression -> '(' expression '-|' annotation ')' :
- core_lib:set_anno('$2', '$4').
+ cerl:set_ann('$2', '$4').
anno_expressions -> anno_expression ',' anno_expressions : ['$1' | '$3'].
anno_expressions -> anno_expression : ['$1'].
@@ -279,15 +281,15 @@ cons_literal -> '[' literal tail_literal : c_cons('$2', '$3').
tail_literal -> ']' : #c_literal{val=[]}.
tail_literal -> '|' literal ']' : '$2'.
-tail_literal -> ',' literal tail_literal : #c_cons{hd='$2',tl='$3'}.
+tail_literal -> ',' literal tail_literal : c_cons('$2', '$3').
tuple -> '{' '}' : c_tuple([]).
tuple -> '{' anno_expressions '}' : c_tuple('$2').
-map_expr -> '~' '{' '}' '~' : #c_map{es=[]}.
-map_expr -> '~' '{' map_pairs '}' '~' : #c_map{es='$3'}.
-map_expr -> '~' '{' map_pairs '|' variable '}' '~' : #c_map{arg='$5',es='$3'}.
-map_expr -> '~' '{' map_pairs '|' map_expr '}' '~' : #c_map{arg='$5',es='$3'}.
+map_expr -> '~' '{' '}' '~' : c_map([]).
+map_expr -> '~' '{' map_pairs '}' '~' : c_map('$3').
+map_expr -> '~' '{' map_pairs '|' variable '}' '~' : ann_c_map([], '$5', '$3').
+map_expr -> '~' '{' map_pairs '|' map_expr '}' '~' : ann_c_map([], '$5', '$3').
map_pairs -> map_pair : ['$1'].
map_pairs -> map_pair ',' map_pairs : ['$1' | '$3'].
@@ -295,10 +297,10 @@ map_pairs -> map_pair ',' map_pairs : ['$1' | '$3'].
map_pair -> map_pair_assoc : '$1'.
map_pair -> map_pair_exact : '$1'.
-map_pair_assoc -> '::' '<' anno_expression ',' anno_expression'>' :
- #c_map_pair{op=#c_literal{val=assoc},key='$3',val='$5'}.
-map_pair_exact -> '~' '<' anno_expression ',' anno_expression'>' :
- #c_map_pair{op=#c_literal{val=exact},key='$3',val='$5'}.
+map_pair_assoc -> anno_expression '=>' anno_expression :
+ #c_map_pair{op=#c_literal{val=assoc},key='$1',val='$3'}.
+map_pair_exact -> anno_expression ':=' anno_expression :
+ #c_map_pair{op=#c_literal{val=exact},key='$1',val='$3'}.
cons -> '[' anno_expression tail : c_cons('$2', '$3').
@@ -307,7 +309,7 @@ tail -> '|' anno_expression ']' : '$2'.
tail -> ',' anno_expression tail : c_cons('$2', '$3').
binary -> '#' '{' '}' '#' : #c_literal{val = <<>>}.
-binary -> '#' '{' segments '}' '#' : #c_binary{segments='$3'}.
+binary -> '#' '{' segments '}' '#' : make_binary('$3').
segments -> segment ',' segments : ['$1' | '$3'].
segments -> segment : ['$1'].
@@ -326,7 +328,7 @@ function_name -> atom '/' integer :
anno_function_name -> function_name : '$1'.
anno_function_name -> '(' function_name '-|' annotation ')' :
- core_lib:set_anno('$2', '$4').
+ cerl:set_ann('$2', '$4').
let_vars -> anno_variable : ['$1'].
let_vars -> '<' '>' : [].
@@ -354,7 +356,7 @@ anno_clauses -> anno_clause : ['$1'].
anno_clause -> clause : '$1'.
anno_clause -> '(' clause '-|' annotation ')' :
- core_lib:set_anno('$2', '$4').
+ cerl:set_ann('$2', '$4').
clause -> clause_pattern 'when' anno_expression '->' anno_expression :
#c_clause{pats='$1',guard='$3',body='$5'}.
@@ -410,9 +412,55 @@ Erlang code.
-include("core_parse.hrl").
--import(cerl, [c_cons/2,c_tuple/1]).
+-import(cerl, [ann_c_map/3,c_cons/2,c_map/1,c_map_pattern/1,c_tuple/1]).
tok_val(T) -> element(3, T).
tok_line(T) -> element(2, T).
+%% make_binary([#c_bitstr{}]) -> #c_binary{} | #c_literal{}
+%% Create either #c_binary{} or #c_literal{} from the binary segments.
+%% In certain contexts, such as keys for maps, only literals and
+%% variables are allowed, so we must not create a #c_binary{}
+%% record in those situations.
+%%
+%% To keep this function simple, we use a crude heuristic. We will
+%% assume that Core Erlang has been produced by core_pp. If the
+%% segments *could* have been output from a literal binary by
+%% core_pp, we will create a #c_literal{}. Otherwise we will create a
+%% #c_binary{} record.
+
+make_binary(Segs) ->
+ try make_lit_bin(<<>>, Segs) of
+ Bs when is_bitstring(Bs) ->
+ #c_literal{val=Bs}
+ catch
+ throw:impossible ->
+ #c_binary{segments=Segs}
+ end.
+
+make_lit_bin(Acc, [#c_bitstr{val=I0,size=Sz0,unit=U0,type=Type0,flags=F0}|T]) ->
+ I = get_lit_val(I0),
+ Sz = get_lit_val(Sz0),
+ U = get_lit_val(U0),
+ Type = get_lit_val(Type0),
+ F = get_lit_val(F0),
+ if
+ is_integer(I), U =:= 1, Type =:= integer, F =:= [unsigned,big] ->
+ ok;
+ true ->
+ throw(impossible)
+ end,
+ if
+ Sz =< 8, T =:= [] ->
+ <<Acc/binary,I:Sz>>;
+ Sz =:= 8 ->
+ make_lit_bin(<<Acc/binary,I:8>>, T);
+ true ->
+ throw(impossible)
+ end;
+make_lit_bin(Acc, []) -> Acc.
+
+get_lit_val(#c_literal{val=Val}) -> Val;
+get_lit_val(_) -> throw(impossible).
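%% A minimal standalone sketch of the heuristic described above: the
%% same "could core_pp have printed this from a literal binary?" test,
%% but over plain {Value,Size,Unit,Type,Flags} tuples so it compiles
%% without core_parse.hrl. The tuple segment format and the module
%% name are assumptions made for this illustration only.
-module(lit_bin_sketch).
-export([build/1]).

build(Segs) ->
    try
        {literal, lit(<<>>, Segs)}
    catch
        throw:impossible -> {binary, Segs}
    end.

%% Accept only unsigned big-endian integer segments of (at most) 8 bits,
%% i.e. the shape used when printing literal binaries; anything else is
%% "impossible" and falls back to ordinary binary construction.
lit(Acc, [{I,Sz,1,integer,[unsigned,big]}|T])
  when is_integer(I), is_integer(Sz), Sz >= 0 ->
    if
        Sz =< 8, T =:= [] -> <<Acc/binary,I:Sz>>;
        Sz =:= 8          -> lit(<<Acc/binary,I:8>>, T);
        true              -> throw(impossible)
    end;
lit(_Acc, [_|_]) -> throw(impossible);
lit(Acc, []) -> Acc.

%% E.g. build([{1,8,1,integer,[unsigned,big]},{2,8,1,integer,[unsigned,big]}])
%% gives {literal,<<1,2>>}, while any other segment shape gives {binary,Segs}.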
+
%% vim: syntax=erlang
diff --git a/lib/compiler/src/core_pp.erl b/lib/compiler/src/core_pp.erl
index 03801a9b6d..9cfca88e8c 100644
--- a/lib/compiler/src/core_pp.erl
+++ b/lib/compiler/src/core_pp.erl
@@ -45,7 +45,7 @@ format(Node) ->
format(Node, #ctxt{}).
maybe_anno(Node, Fun, Ctxt) ->
- As = core_lib:get_anno(Node),
+ As = cerl:get_ann(Node),
case get_line(As) of
none ->
maybe_anno(Node, Fun, Ctxt, As);
@@ -183,15 +183,9 @@ format_1(#c_map{arg=Var,es=Es}, Ctxt) ->
"}~"
];
format_1(#c_map_pair{op=#c_literal{val=assoc},key=K,val=V}, Ctxt) ->
- ["::<",
- format_hseq([K,V], ",", add_indent(Ctxt, 1), fun format/2),
- ">"
- ];
+ format_map_pair("=>", K, V, Ctxt);
format_1(#c_map_pair{op=#c_literal{val=exact},key=K,val=V}, Ctxt) ->
- ["~<",
- format_hseq([K,V], ",", add_indent(Ctxt, 1), fun format/2),
- ">"
- ];
+ format_map_pair(":=", K, V, Ctxt);
format_1(#c_cons{hd=H,tl=T}, Ctxt) ->
Txt = ["["|format(H, add_indent(Ctxt, 1))],
[Txt|format_list_tail(T, add_indent(Ctxt, width(Txt, Ctxt)))];
@@ -201,7 +195,7 @@ format_1(#c_alias{var=V,pat=P}, Ctxt) ->
Txt = [format(V, Ctxt)|" = "],
[Txt|format(P, add_indent(Ctxt, width(Txt, Ctxt)))];
format_1(#c_let{vars=Vs0,arg=A,body=B}, Ctxt) ->
- Vs = [core_lib:set_anno(V, []) || V <- Vs0],
+ Vs = [cerl:set_ann(V, []) || V <- Vs0],
case is_simple_term(A) of
false ->
Ctxt1 = add_indent(Ctxt, Ctxt#ctxt.body_indent),
@@ -219,7 +213,7 @@ format_1(#c_let{vars=Vs0,arg=A,body=B}, Ctxt) ->
["let ",
format_values(Vs, add_indent(Ctxt, 4)),
" = ",
- format(core_lib:set_anno(A, []), Ctxt1),
+ format(cerl:set_ann(A, []), Ctxt1),
nl_indent(Ctxt),
"in "
| format(B, add_indent(Ctxt, 4))
@@ -448,6 +442,12 @@ format_list_tail(#c_cons{anno=[],hd=H,tl=T}, Ctxt) ->
format_list_tail(Tail, Ctxt) ->
["|",format(Tail, add_indent(Ctxt, 1)),"]"].
+format_map_pair(Op, K, V, Ctxt0) ->
+ Ctxt1 = add_indent(Ctxt0, 1),
+ Txt = format(K, set_class(Ctxt1, expr)),
+ Ctxt2 = add_indent(Ctxt0, width(Txt, Ctxt1)),
+ [Txt,Op,format(V, Ctxt2)].
+
indent(Ctxt) -> indent(Ctxt#ctxt.indent, Ctxt).
indent(N, _) when N =< 0 -> "";
diff --git a/lib/compiler/src/core_scan.erl b/lib/compiler/src/core_scan.erl
index b7799b373a..8ab20b1982 100644
--- a/lib/compiler/src/core_scan.erl
+++ b/lib/compiler/src/core_scan.erl
@@ -271,8 +271,10 @@ scan1("->" ++ Cs, Toks, Pos) ->
scan1(Cs, [{'->',Pos}|Toks], Pos);
scan1("-|" ++ Cs, Toks, Pos) ->
scan1(Cs, [{'-|',Pos}|Toks], Pos);
-scan1("::" ++ Cs, Toks, Pos) ->
- scan1(Cs, [{'::',Pos}|Toks], Pos);
+scan1(":=" ++ Cs, Toks, Pos) ->
+ scan1(Cs, [{':=',Pos}|Toks], Pos);
+scan1("=>" ++ Cs, Toks, Pos) ->
+ scan1(Cs, [{'=>',Pos}|Toks], Pos);
scan1([C|Cs], Toks, Pos) -> %Punctuation character
P = list_to_atom([C]),
scan1(Cs, [{P,Pos}|Toks], Pos);
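%% A tiny tokenizer sketch showing why the new two-character tokens
%% must be matched before the final single-character clause: with the
%% order reversed, ":=" would be split into ':' and '='. The module
%% name and the list-of-atoms output format are assumptions for this
%% example only, not the real core_scan state.
-module(scan_sketch).
-export([tokens/1]).

tokens(":=" ++ Cs) -> [':=' | tokens(Cs)];
tokens("=>" ++ Cs) -> ['=>' | tokens(Cs)];
tokens([C|Cs])     -> [list_to_atom([C]) | tokens(Cs)];   %% punctuation fallback
tokens([])         -> [].

%% scan_sketch:tokens("K:=V") -> ['K',':=','V'].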
diff --git a/lib/compiler/src/erl_bifs.erl b/lib/compiler/src/erl_bifs.erl
index 6c75538194..bcc2297250 100644
--- a/lib/compiler/src/erl_bifs.erl
+++ b/lib/compiler/src/erl_bifs.erl
@@ -134,6 +134,7 @@ is_pure(math, erf, 1) -> true;
is_pure(math, erfc, 1) -> true;
is_pure(math, exp, 1) -> true;
is_pure(math, log, 1) -> true;
+is_pure(math, log2, 1) -> true;
is_pure(math, log10, 1) -> true;
is_pure(math, pow, 2) -> true;
is_pure(math, sin, 1) -> true;
diff --git a/lib/compiler/src/sys_core_fold.erl b/lib/compiler/src/sys_core_fold.erl
index 82817a987a..ea1959d0f8 100644
--- a/lib/compiler/src/sys_core_fold.erl
+++ b/lib/compiler/src/sys_core_fold.erl
@@ -96,6 +96,10 @@
t=[], %Types
in_guard=false}). %In guard or not.
+-type type_info() :: cerl:cerl() | 'bool'.
+-type yes_no_maybe() :: 'yes' | 'no' | 'maybe'.
+-type sub() :: #sub{}.
+
-spec module(cerl:c_module(), [compile:option()]) ->
{'ok', cerl:c_module(), [_]}.
@@ -313,7 +317,7 @@ expr(#c_letrec{defs=Fs0,body=B0}=Letrec, Ctxt, Sub) ->
Fs1 = map(fun ({Name,Fb}) ->
{Name,expr(Fb, {letrec,Ctxt}, Sub)}
end, Fs0),
- B1 = body(B0, value, Sub),
+ B1 = body(B0, Ctxt, Sub),
Letrec#c_letrec{defs=Fs1,body=B1};
expr(#c_case{}=Case0, Ctxt, Sub) ->
%% Ideally, the compiler should only emit warnings when there is
@@ -462,10 +466,7 @@ is_safe_simple(#c_call{module=#c_literal{val=erlang},
case erl_internal:bool_op(Name, NumArgs) of
true ->
%% Boolean operators are safe if the arguments are boolean.
- all(fun(#c_var{name=V}) -> is_boolean_type(V, Sub);
- (#c_literal{val=Lit}) -> is_boolean(Lit);
- (_) -> false
- end, Args);
+ all(fun(C) -> is_boolean_type(C, Sub) =:= yes end, Args);
false ->
%% We need a rather complicated test to ensure that
%% we only allow safe calls that are allowed in a guard.
@@ -607,14 +608,6 @@ eval_binary_1([#c_bitstr{val=#c_literal{val=Val},size=#c_literal{val=Sz},
error:_ ->
throw(impossible)
end;
-eval_binary_1([#c_bitstr{val=#c_literal{},size=#c_literal{},
- unit=#c_literal{},type=#c_literal{},
- flags=#c_cons{}=Flags}=Bitstr|Ss], Acc0) ->
- case cerl:fold_literal(Flags) of
- #c_literal{} = Flags1 ->
- eval_binary_1([Bitstr#c_bitstr{flags=Flags1}|Ss], Acc0);
- _ -> throw(impossible)
- end;
eval_binary_1([], Acc) -> Acc;
eval_binary_1(_, _) -> throw(impossible).
@@ -688,23 +681,15 @@ count_bits_1(Int, Bits) -> count_bits_1(Int bsr 64, Bits+64).
%% a rewritten expression consisting of a sequence of
%% the arguments only is returned.
-useless_call(effect, #c_call{anno=Anno,
- module=#c_literal{val=Mod},
+useless_call(effect, #c_call{module=#c_literal{val=Mod},
name=#c_literal{val=Name},
args=Args}=Call) ->
A = length(Args),
case erl_bifs:is_safe(Mod, Name, A) of
false ->
case erl_bifs:is_pure(Mod, Name, A) of
- true ->
- case member(result_not_wanted, Anno) of
- false ->
- add_warning(Call, result_ignored);
- true ->
- ok
- end;
- false ->
- ok
+ true -> add_warning(Call, result_ignored);
+ false -> ok
end,
no;
true ->
@@ -730,385 +715,23 @@ make_effect_seq([], _) -> void().
call(#c_call{args=As}=Call, #c_literal{val=M}=M0, #c_literal{val=N}=N0, Sub) ->
case get(no_inline_list_funcs) of
true ->
- call_0(Call, M0, N0, As, Sub);
+ call_1(Call, M0, N0, As, Sub);
false ->
- call_1(Call, M, N, As, Sub)
+ case sys_core_fold_lists:call(Call, M, N, As) of
+ none ->
+ call_1(Call, M, N, As, Sub);
+ Core ->
+ expr(Core, Sub)
+ end
+
end;
call(#c_call{args=As}=Call, M, N, Sub) ->
- call_0(Call, M, N, As, Sub).
+ call_1(Call, M, N, As, Sub).
-call_0(Call, M, N, As0, Sub) ->
+call_1(Call, M, N, As0, Sub) ->
As1 = expr_list(As0, value, Sub),
fold_call(Call#c_call{args=As1}, M, N, As1, Sub).
-%% We inline some very common higher order list operations.
-%% We use the same evaluation order as the library function.
-
-call_1(#c_call{anno=Anno}, lists, all, [Arg1,Arg2], Sub) ->
- Loop = #c_var{name={'lists^all',1}},
- F = #c_var{name='F'},
- Xs = #c_var{name='Xs'},
- X = #c_var{name='X'},
- Err1 = #c_tuple{es=[#c_literal{val='case_clause'}, X]},
- CC1 = #c_clause{pats=[#c_literal{val=true}], guard=#c_literal{val=true},
- body=#c_apply{anno=Anno, op=Loop, args=[Xs]}},
- CC2 = #c_clause{pats=[#c_literal{val=false}], guard=#c_literal{val=true},
- body=#c_literal{val=false}},
- CC3 = #c_clause{pats=[X], guard=#c_literal{val=true},
- body=match_fail(Anno, Err1)},
- C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
- body=#c_case{arg=#c_apply{anno=Anno, op=F, args=[X]},
- clauses = [CC1, CC2, CC3]}},
- C2 = #c_clause{pats=[#c_literal{val=[]}],
- guard=#c_call{module=#c_literal{val=erlang},
- name=#c_literal{val=is_function},
- args=[F, #c_literal{val=1}]},
- body=#c_literal{val=true}},
- Err2 = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]},
- C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
- body=match_fail([{function_name,{'lists^all',1}}|Anno], Err2)},
- Fun = #c_fun{vars=[Xs],
- body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
- L = #c_var{name='L'},
- expr(#c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]},
- body=#c_letrec{defs=[{Loop,Fun}],
- body=#c_apply{anno=Anno, op=Loop, args=[L]}}},
- Sub);
-call_1(#c_call{anno=Anno}, lists, any, [Arg1,Arg2], Sub) ->
- Loop = #c_var{name={'lists^any',1}},
- F = #c_var{name='F'},
- Xs = #c_var{name='Xs'},
- X = #c_var{name='X'},
- Err1 = #c_tuple{es=[#c_literal{val='case_clause'}, X]},
- CC1 = #c_clause{pats=[#c_literal{val=true}], guard=#c_literal{val=true},
- body=#c_literal{val=true}},
- CC2 = #c_clause{pats=[#c_literal{val=false}], guard=#c_literal{val=true},
- body=#c_apply{anno=Anno, op=Loop, args=[Xs]}},
- CC3 = #c_clause{pats=[X], guard=#c_literal{val=true},
- body=match_fail(Anno, Err1)},
- C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
- body=#c_case{arg=#c_apply{anno=Anno, op=F, args=[X]},
- clauses = [CC1, CC2, CC3]}},
- C2 = #c_clause{pats=[#c_literal{val=[]}],
- guard=#c_call{module=#c_literal{val=erlang},
- name=#c_literal{val=is_function},
- args=[F, #c_literal{val=1}]},
- body=#c_literal{val=false}},
- Err2 = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]},
- C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
- body=match_fail([{function_name,{'lists^any',1}}|Anno], Err2)},
- Fun = #c_fun{vars=[Xs],
- body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
- L = #c_var{name='L'},
- expr(#c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]},
- body=#c_letrec{defs=[{Loop,Fun}],
- body=#c_apply{anno=Anno, op=Loop, args=[L]}}},
- Sub);
-call_1(#c_call{anno=Anno}, lists, foreach, [Arg1,Arg2], Sub) ->
- Loop = #c_var{name={'lists^foreach',1}},
- F = #c_var{name='F'},
- Xs = #c_var{name='Xs'},
- X = #c_var{name='X'},
- C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
- body=#c_seq{arg=#c_apply{anno=Anno, op=F, args=[X]},
- body=#c_apply{anno=Anno, op=Loop, args=[Xs]}}},
- C2 = #c_clause{pats=[#c_literal{val=[]}],
- guard=#c_call{module=#c_literal{val=erlang},
- name=#c_literal{val=is_function},
- args=[F, #c_literal{val=1}]},
- body=#c_literal{val=ok}},
- Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]},
- C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
- body=match_fail([{function_name,{'lists^foreach',1}}|Anno], Err)},
- Fun = #c_fun{vars=[Xs],
- body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
- L = #c_var{name='L'},
- expr(#c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]},
- body=#c_letrec{defs=[{Loop,Fun}],
- body=#c_apply{anno=Anno, op=Loop, args=[L]}}},
- Sub);
-call_1(#c_call{anno=Anno}, lists, map, [Arg1,Arg2], Sub) ->
- Loop = #c_var{name={'lists^map',1}},
- F = #c_var{name='F'},
- Xs = #c_var{name='Xs'},
- X = #c_var{name='X'},
- H = #c_var{name='H'},
- C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
- body=#c_let{vars=[H], arg=#c_apply{anno=Anno,
- op=F,
- args=[X]},
- body=#c_cons{hd=H,
- anno=[compiler_generated],
- tl=#c_apply{anno=Anno,
- op=Loop,
- args=[Xs]}}}},
- C2 = #c_clause{pats=[#c_literal{val=[]}],
- guard=#c_call{module=#c_literal{val=erlang},
- name=#c_literal{val=is_function},
- args=[F, #c_literal{val=1}]},
- body=#c_literal{val=[]}},
- Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]},
- C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
- body=match_fail([{function_name,{'lists^map',1}}|Anno], Err)},
- Fun = #c_fun{vars=[Xs],
- body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
- L = #c_var{name='L'},
- expr(#c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]},
- body=#c_letrec{defs=[{Loop,Fun}],
- body=#c_apply{anno=Anno, op=Loop, args=[L]}}},
- Sub);
-call_1(#c_call{anno=Anno}, lists, flatmap, [Arg1,Arg2], Sub) ->
- Loop = #c_var{name={'lists^flatmap',1}},
- F = #c_var{name='F'},
- Xs = #c_var{name='Xs'},
- X = #c_var{name='X'},
- H = #c_var{name='H'},
- C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
- body=#c_let{vars=[H],
- arg=#c_apply{anno=Anno, op=F, args=[X]},
- body=#c_call{anno=[compiler_generated|Anno],
- module=#c_literal{val=erlang},
- name=#c_literal{val='++'},
- args=[H,
- #c_apply{anno=Anno,
- op=Loop,
- args=[Xs]}]}}},
- C2 = #c_clause{pats=[#c_literal{val=[]}],
- guard=#c_call{module=#c_literal{val=erlang},
- name=#c_literal{val=is_function},
- args=[F, #c_literal{val=1}]},
- body=#c_literal{val=[]}},
- Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]},
- C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
- body=match_fail([{function_name,{'lists^flatmap',1}}|Anno], Err)},
- Fun = #c_fun{vars=[Xs],
- body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
- L = #c_var{name='L'},
- expr(#c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]},
- body=#c_letrec{defs=[{Loop,Fun}],
- body=#c_apply{anno=Anno, op=Loop, args=[L]}}},
- Sub);
-call_1(#c_call{anno=Anno}, lists, filter, [Arg1,Arg2], Sub) ->
- Loop = #c_var{name={'lists^filter',1}},
- F = #c_var{name='F'},
- Xs = #c_var{name='Xs'},
- X = #c_var{name='X'},
- B = #c_var{name='B'},
- Err1 = #c_tuple{es=[#c_literal{val='case_clause'}, X]},
- CC1 = #c_clause{pats=[#c_literal{val=true}], guard=#c_literal{val=true},
- body=#c_cons{anno=[compiler_generated], hd=X, tl=Xs}},
- CC2 = #c_clause{pats=[#c_literal{val=false}], guard=#c_literal{val=true},
- body=Xs},
- CC3 = #c_clause{pats=[X], guard=#c_literal{val=true},
- body=match_fail(Anno, Err1)},
- Case = #c_case{arg=B, clauses = [CC1, CC2, CC3]},
- C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
- body=#c_let{vars=[B],
- arg=#c_apply{anno=Anno, op=F, args=[X]},
- body=#c_let{vars=[Xs],
- arg=#c_apply{anno=Anno,
- op=Loop,
- args=[Xs]},
- body=Case}}},
- C2 = #c_clause{pats=[#c_literal{val=[]}],
- guard=#c_call{module=#c_literal{val=erlang},
- name=#c_literal{val=is_function},
- args=[F, #c_literal{val=1}]},
- body=#c_literal{val=[]}},
- Err2 = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]},
- C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
- body=match_fail([{function_name,{'lists^filter',1}}|Anno], Err2)},
- Fun = #c_fun{vars=[Xs],
- body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
- L = #c_var{name='L'},
- expr(#c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]},
- body=#c_letrec{defs=[{Loop,Fun}],
- body=#c_apply{anno=Anno, op=Loop, args=[L]}}},
- Sub);
-call_1(#c_call{anno=Anno}, lists, foldl, [Arg1,Arg2,Arg3], Sub) ->
- Loop = #c_var{name={'lists^foldl',2}},
- F = #c_var{name='F'},
- Xs = #c_var{name='Xs'},
- X = #c_var{name='X'},
- A = #c_var{name='A'},
- C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
- body=#c_apply{anno=Anno,
- op=Loop,
- args=[Xs, #c_apply{anno=Anno,
- op=F,
- args=[X, A]}]}},
- C2 = #c_clause{pats=[#c_literal{val=[]}],
- guard=#c_call{module=#c_literal{val=erlang},
- name=#c_literal{val=is_function},
- args=[F, #c_literal{val=2}]},
- body=A},
- Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, A, Xs]},
- C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
- body=match_fail([{function_name,{'lists^foldl',2}}|Anno], Err)},
- Fun = #c_fun{vars=[Xs, A],
- body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
- L = #c_var{name='L'},
- expr(#c_let{vars=[F, A, L], arg=#c_values{es=[Arg1, Arg2, Arg3]},
- body=#c_letrec{defs=[{Loop,Fun}],
- body=#c_apply{anno=Anno, op=Loop, args=[L, A]}}},
- Sub);
-call_1(#c_call{anno=Anno}, lists, foldr, [Arg1,Arg2,Arg3], Sub) ->
- Loop = #c_var{name={'lists^foldr',2}},
- F = #c_var{name='F'},
- Xs = #c_var{name='Xs'},
- X = #c_var{name='X'},
- A = #c_var{name='A'},
- C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
- body=#c_apply{anno=Anno,
- op=F,
- args=[X, #c_apply{anno=Anno,
- op=Loop,
- args=[Xs, A]}]}},
- C2 = #c_clause{pats=[#c_literal{val=[]}],
- guard=#c_call{module=#c_literal{val=erlang},
- name=#c_literal{val=is_function},
- args=[F, #c_literal{val=2}]},
- body=A},
- Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, A, Xs]},
- C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
- body=match_fail([{function_name,{'lists^foldr',2}}|Anno], Err)},
- Fun = #c_fun{vars=[Xs, A],
- body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
- L = #c_var{name='L'},
- expr(#c_let{vars=[F, A, L], arg=#c_values{es=[Arg1, Arg2, Arg3]},
- body=#c_letrec{defs=[{Loop,Fun}],
- body=#c_apply{anno=Anno, op=Loop, args=[L, A]}}},
- Sub);
-call_1(#c_call{anno=Anno}, lists, mapfoldl, [Arg1,Arg2,Arg3], Sub) ->
- Loop = #c_var{name={'lists^mapfoldl',2}},
- F = #c_var{name='F'},
- Xs = #c_var{name='Xs'},
- X = #c_var{name='X'},
- Avar = #c_var{name='A'},
- Match =
- fun (A, P, E) ->
- C1 = #c_clause{pats=[P], guard=#c_literal{val=true}, body=E},
- Err = #c_tuple{es=[#c_literal{val='badmatch'}, X]},
- C2 = #c_clause{pats=[X], guard=#c_literal{val=true},
- body=match_fail(Anno, Err)},
- #c_case{arg=A, clauses=[C1, C2]}
- end,
- C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
- body=Match(#c_apply{anno=Anno, op=F, args=[X, Avar]},
- #c_tuple{es=[X, Avar]},
-%%% Tuple passing version
- Match(#c_apply{anno=Anno,
- op=Loop,
- args=[Xs, Avar]},
- #c_tuple{es=[Xs, Avar]},
- #c_tuple{anno=[compiler_generated],
- es=[#c_cons{anno=[compiler_generated],
- hd=X, tl=Xs},
- Avar]})
-%%% Multiple-value version
-%%% #c_let{vars=[Xs,A],
-%%% %% The tuple here will be optimised
-%%% %% away later; no worries.
-%%% arg=#c_apply{op=Loop, args=[Xs, A]},
-%%% body=#c_values{es=[#c_cons{hd=X, tl=Xs},
-%%% A]}}
- )},
- C2 = #c_clause{pats=[#c_literal{val=[]}],
- guard=#c_call{module=#c_literal{val=erlang},
- name=#c_literal{val=is_function},
- args=[F, #c_literal{val=2}]},
-%%% Tuple passing version
- body=#c_tuple{anno=[compiler_generated],
- es=[#c_literal{val=[]}, Avar]}},
-%%% Multiple-value version
-%%% body=#c_values{es=[#c_literal{val=[]}, A]}},
- Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Avar, Xs]},
- C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
- body=match_fail([{function_name,{'lists^mapfoldl',2}}|Anno], Err)},
- Fun = #c_fun{vars=[Xs, Avar],
- body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
- L = #c_var{name='L'},
- expr(#c_let{vars=[F, Avar, L], arg=#c_values{es=[Arg1, Arg2, Arg3]},
- body=#c_letrec{defs=[{Loop,Fun}],
-%%% Tuple passing version
- body=#c_apply{anno=Anno,
- op=Loop,
- args=[L, Avar]}}},
-%%% Multiple-value version
-%%% body=#c_let{vars=[Xs, A],
-%%% arg=#c_apply{op=Loop,
-%%% args=[L, A]},
-%%% body=#c_tuple{es=[Xs, A]}}}},
- Sub);
-call_1(#c_call{anno=Anno}, lists, mapfoldr, [Arg1,Arg2,Arg3], Sub) ->
- Loop = #c_var{name={'lists^mapfoldr',2}},
- F = #c_var{name='F'},
- Xs = #c_var{name='Xs'},
- X = #c_var{name='X'},
- Avar = #c_var{name='A'},
- Match =
- fun (A, P, E) ->
- C1 = #c_clause{pats=[P], guard=#c_literal{val=true}, body=E},
- Err = #c_tuple{es=[#c_literal{val='badmatch'}, X]},
- C2 = #c_clause{pats=[X], guard=#c_literal{val=true},
- body=match_fail(Anno, Err)},
- #c_case{arg=A, clauses=[C1, C2]}
- end,
- C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
-%%% Tuple passing version
- body=Match(#c_apply{anno=Anno,
- op=Loop,
- args=[Xs, Avar]},
- #c_tuple{es=[Xs, Avar]},
- Match(#c_apply{anno=Anno, op=F, args=[X, Avar]},
- #c_tuple{es=[X, Avar]},
- #c_tuple{anno=[compiler_generated],
- es=[#c_cons{anno=[compiler_generated],
- hd=X, tl=Xs}, Avar]}))
-%%% Multiple-value version
-%%% body=#c_let{vars=[Xs,A],
-%%% %% The tuple will be optimised away
-%%% arg=#c_apply{op=Loop, args=[Xs, A]},
-%%% body=Match(#c_apply{op=F, args=[X, A]},
-%%% #c_tuple{es=[X, A]},
-%%% #c_values{es=[#c_cons{hd=X, tl=Xs},
-%%% A]})}
- },
- C2 = #c_clause{pats=[#c_literal{val=[]}],
- guard=#c_call{module=#c_literal{val=erlang},
- name=#c_literal{val=is_function},
- args=[F, #c_literal{val=2}]},
-%%% Tuple passing version
- body=#c_tuple{anno=[compiler_generated],
- es=[#c_literal{val=[]}, Avar]}},
-%%% Multiple-value version
-%%% body=#c_values{es=[#c_literal{val=[]}, A]}},
- Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Avar, Xs]},
- C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
- body=match_fail([{function_name,{'lists^mapfoldr',2}}|Anno], Err)},
- Fun = #c_fun{vars=[Xs, Avar],
- body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
- L = #c_var{name='L'},
- expr(#c_let{vars=[F, Avar, L], arg=#c_values{es=[Arg1, Arg2, Arg3]},
- body=#c_letrec{defs=[{Loop,Fun}],
-%%% Tuple passing version
- body=#c_apply{anno=Anno,
- op=Loop,
- args=[L, Avar]}}},
-%%% Multiple-value version
-%%% body=#c_let{vars=[Xs, A],
-%%% arg=#c_apply{op=Loop,
-%%% args=[L, A]},
-%%% body=#c_tuple{es=[Xs, A]}}}},
- Sub);
-call_1(#c_call{module=M, name=N}=Call, _, _, As, Sub) ->
- call_0(Call, M, N, As, Sub).
-
-match_fail(Anno, Arg) ->
- #c_primop{anno=Anno,
- name=#c_literal{val='match_fail'},
- args=[Arg]}.
-
%% fold_call(Call, Mod, Name, Args, Sub) -> Expr.
%% Try to safely evaluate the call. Just try to evaluate arguments,
%% do the call and convert return values to literals. If this
@@ -1133,29 +756,33 @@ fold_call_1(Call, Mod, Name, Args, Sub) ->
true -> fold_call_2(Call, Mod, Name, Args, Sub)
end.
-fold_call_2(Call, Module, Name, Args0, Sub) ->
- try
- Args = [core_lib:literal_value(A) || A <- Args0],
- try apply(Module, Name, Args) of
- Val ->
- case cerl:is_literal_term(Val) of
- true ->
- #c_literal{val=Val};
- false ->
- %% Successful evaluation, but it was not
- %% possible to express the computed value as a literal.
- Call
- end
- catch
- error:Reason ->
- %% Evaluation of the function failed. Warn and replace
- %% the call with a call to erlang:error/1.
- eval_failure(Call, Reason)
- end
+fold_call_2(Call, Module, Name, Args, Sub) ->
+ case all(fun cerl:is_literal/1, Args) of
+ true ->
+ %% All arguments are literals.
+ fold_lit_args(Call, Module, Name, Args);
+ false ->
+ %% At least one non-literal argument.
+ fold_non_lit_args(Call, Module, Name, Args, Sub)
+ end.
+
+fold_lit_args(Call, Module, Name, Args0) ->
+ Args = [cerl:concrete(A) || A <- Args0],
+ try apply(Module, Name, Args) of
+ Val ->
+ case cerl:is_literal_term(Val) of
+ true ->
+ cerl:abstract(Val);
+ false ->
+ %% Successful evaluation, but it was not possible
+ %% to express the computed value as a literal.
+ Call
+ end
catch
- error:_ ->
- %% There was at least one non-literal argument.
- fold_non_lit_args(Call, Module, Name, Args0, Sub)
+ error:Reason ->
+ %% Evaluation of the function failed. Warn and replace
+ %% the call with a call to erlang:error/1.
+ eval_failure(Call, Reason)
end.
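%% The constant-folding step above in isolation: when every argument is
%% a known term, run the call at compile time; if evaluation fails, the
%% expression is kept (the real code additionally checks the result with
%% cerl:is_literal_term/1 and emits a warning on failure). The module
%% name and return tags here are assumptions for the sketch.
-module(fold_sketch).
-export([fold_call/3]).

fold_call(M, F, Args) ->
    try apply(M, F, Args) of
        Val -> {literal, Val}                   %% safe to replace the call
    catch
        error:Reason -> {eval_failed, Reason}   %% caller keeps the call and warns
    end.

%% fold_sketch:fold_call(erlang, '+', [1,2]) -> {literal,3}
%% fold_sketch:fold_call(erlang, hd, [[]])   -> {eval_failed,badarg}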
%% fold_non_lit_args(Call, Module, Name, Args, Sub) -> Expr.
@@ -1194,17 +821,18 @@ fold_non_lit_args(Call, _, _, _, _) -> Call.
%% Evaluate a relational operation using type information.
eval_rel_op(Call, Op, [#c_var{name=V},#c_var{name=V}], _) ->
Bool = erlang:Op(same, same),
- #c_literal{anno=core_lib:get_anno(Call),val=Bool};
-eval_rel_op(Call, '=:=', [#c_var{name=V}=Var,#c_literal{val=true}], Sub) ->
+ #c_literal{anno=cerl:get_ann(Call),val=Bool};
+eval_rel_op(Call, '=:=', [Term,#c_literal{val=true}], Sub) ->
%% BoolVar =:= true ==> BoolVar
- case is_boolean_type(V, Sub) of
- true -> Var;
- false -> Call
+ case is_boolean_type(Term, Sub) of
+ yes -> Term;
+ maybe -> Call;
+ no -> #c_literal{val=false}
end;
eval_rel_op(Call, '==', Ops, _Sub) ->
case is_exact_eq_ok(Ops) of
true ->
- Name = #c_literal{anno=core_lib:get_anno(Call),val='=:='},
+ Name = #c_literal{anno=cerl:get_ann(Call),val='=:='},
Call#c_call{name=Name};
false ->
Call
@@ -1212,7 +840,7 @@ eval_rel_op(Call, '==', Ops, _Sub) ->
eval_rel_op(Call, '/=', Ops, _Sub) ->
case is_exact_eq_ok(Ops) of
true ->
- Name = #c_literal{anno=core_lib:get_anno(Call),val='=/='},
+ Name = #c_literal{anno=cerl:get_ann(Call),val='=/='},
Call#c_call{name=Name};
false ->
Call
@@ -1229,6 +857,11 @@ is_non_numeric([H|T]) ->
is_non_numeric(H) andalso is_non_numeric(T);
is_non_numeric(Tuple) when is_tuple(Tuple) ->
is_non_numeric_tuple(Tuple, tuple_size(Tuple));
+is_non_numeric(Map) when is_map(Map) ->
+ %% Note that 17.x and 18.x compare keys in different ways.
+ %% Be very conservative -- require that both keys and values
+ %% are non-numeric.
+ is_non_numeric(maps:to_list(Map));
is_non_numeric(Num) when is_number(Num) ->
false;
is_non_numeric(_) -> true.
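%% is_non_numeric/1 exists because rewriting '==' to '=:=' (see
%% eval_rel_op/4 above) is only safe when numbers cannot occur in an
%% operand: the two operators agree on every other term but differ on
%% mixed integer/float comparisons. A small self-checking sketch with
%% an assumed module name:
-module(eq_sketch).
-export([demo/0]).

demo() ->
    true  = (1 == 1.0),                  %% arithmetic equality
    false = (1 =:= 1.0),                 %% exact equality distinguishes them
    true  = ({a,"x"} ==  {a,"x"}),
    true  = ({a,"x"} =:= {a,"x"}),       %% identical for non-numeric terms
    ok.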
@@ -1242,40 +875,31 @@ is_non_numeric_tuple(_Tuple, 0) -> true.
%% there must be at least one non-literal argument (i.e.
%% there is no need to handle the case that all arguments
%% are literal).
-eval_bool_op(Call, 'and', [#c_literal{val=true},#c_var{name=V}=Res], Sub) ->
- case is_boolean_type(V, Sub) of
- true -> Res;
- false-> Call
- end;
-eval_bool_op(Call, 'and', [#c_var{name=V}=Res,#c_literal{val=true}], Sub) ->
- case is_boolean_type(V, Sub) of
- true -> Res;
- false-> Call
- end;
-eval_bool_op(Call, 'and', [#c_literal{val=false}=Res,#c_var{name=V}], Sub) ->
- case is_boolean_type(V, Sub) of
- true -> Res;
- false-> Call
- end;
-eval_bool_op(Call, 'and', [#c_var{name=V},#c_literal{val=false}=Res], Sub) ->
- case is_boolean_type(V, Sub) of
- true -> Res;
- false-> Call
- end;
+
+eval_bool_op(Call, 'and', [#c_literal{val=true},Term], Sub) ->
+ eval_bool_op_1(Call, Term, Term, Sub);
+eval_bool_op(Call, 'and', [Term,#c_literal{val=true}], Sub) ->
+ eval_bool_op_1(Call, Term, Term, Sub);
+eval_bool_op(Call, 'and', [#c_literal{val=false}=Res,Term], Sub) ->
+ eval_bool_op_1(Call, Res, Term, Sub);
+eval_bool_op(Call, 'and', [Term,#c_literal{val=false}=Res], Sub) ->
+ eval_bool_op_1(Call, Res, Term, Sub);
eval_bool_op(Call, _, _, _) -> Call.
+eval_bool_op_1(Call, Res, Term, Sub) ->
+ case is_boolean_type(Term, Sub) of
+ yes -> Res;
+ no -> eval_failure(Call, badarg);
+ maybe -> Call
+ end.
+
%% Evaluate is_boolean/1 using type information.
-eval_is_boolean(Call, #c_var{name=V}, Sub) ->
- case is_boolean_type(V, Sub) of
- true -> #c_literal{val=true};
- false -> Call
- end;
-eval_is_boolean(_, #c_cons{}, _) ->
- #c_literal{val=false};
-eval_is_boolean(_, #c_tuple{}, _) ->
- #c_literal{val=false};
-eval_is_boolean(Call, _, _) ->
- Call.
+eval_is_boolean(Call, Term, Sub) ->
+ case is_boolean_type(Term, Sub) of
+ no -> #c_literal{val=false};
+ yes -> #c_literal{val=true};
+ maybe -> Call
+ end.
%% eval_length(Call, List) -> Val.
%% Evaluates the length for the prefix of List which has a known
@@ -1325,33 +949,33 @@ eval_append(Call, X, Y) ->
%% Evaluates element/2 if the position Pos is a literal and
%% the shape of the tuple Tuple is known.
%%
-eval_element(Call, #c_literal{val=Pos}, #c_tuple{es=Es}, _Types) when is_integer(Pos) ->
- if
- 1 =< Pos, Pos =< length(Es) ->
- lists:nth(Pos, Es);
- true ->
- eval_failure(Call, badarg)
- end;
-eval_element(Call, #c_literal{val=Pos}, #c_var{name=V}, Types)
+eval_element(Call, #c_literal{val=Pos}, Tuple, Types)
when is_integer(Pos) ->
- case orddict:find(V, Types#sub.t) of
- {ok,#c_tuple{es=Elements}} ->
+ case get_type(Tuple, Types) of
+ none ->
+ Call;
+ Type ->
+ Es = case cerl:is_c_tuple(Type) of
+ false -> [];
+ true -> cerl:tuple_es(Type)
+ end,
if
- 1 =< Pos, Pos =< length(Elements) ->
- case lists:nth(Pos, Elements) of
- #c_alias{var=Alias} -> Alias;
- Res -> Res
+ 1 =< Pos, Pos =< length(Es) ->
+ El = lists:nth(Pos, Es),
+ try
+ pat_to_expr(El)
+ catch
+ throw:impossible ->
+ Call
end;
true ->
+ %% Index outside tuple or not a tuple.
eval_failure(Call, badarg)
- end;
- {ok,_} ->
- eval_failure(Call, badarg);
- error ->
- Call
+ end
end;
-eval_element(Call, Pos, Tuple, _Types) ->
- case is_not_integer(Pos) orelse is_not_tuple(Tuple) of
+eval_element(Call, Pos, Tuple, Sub) ->
+ case is_int_type(Pos, Sub) =:= no orelse
+ is_tuple_type(Tuple, Sub) =:= no of
true ->
eval_failure(Call, badarg);
false ->
@@ -1361,34 +985,27 @@ eval_element(Call, Pos, Tuple, _Types) ->
%% eval_is_record(Call, Var, Tag, Size, Types) -> Val.
%% Evaluates is_record/3 using type information.
%%
-eval_is_record(Call, #c_var{name=V}, #c_literal{val=NeededTag}=Lit,
+eval_is_record(Call, Term, #c_literal{val=NeededTag},
#c_literal{val=Size}, Types) ->
- case orddict:find(V, Types#sub.t) of
- {ok,#c_tuple{es=[#c_literal{val=Tag}|_]=Es}} ->
- Lit#c_literal{val=Tag =:= NeededTag andalso
- length(Es) =:= Size};
- _ ->
- Call
+ case get_type(Term, Types) of
+ none ->
+ Call;
+ Type ->
+ Es = case cerl:is_c_tuple(Type) of
+ false -> [];
+ true -> cerl:tuple_es(Type)
+ end,
+ case Es of
+ [#c_literal{val=Tag}|_] ->
+ Bool = Tag =:= NeededTag andalso
+ length(Es) =:= Size,
+ #c_literal{val=Bool};
+ _ ->
+ #c_literal{val=false}
+ end
end;
eval_is_record(Call, _, _, _, _) -> Call.
-%% is_not_integer(Core) -> true | false.
-%% Returns true if Core is definitely not an integer.
-
-is_not_integer(#c_literal{val=Val}) when not is_integer(Val) -> true;
-is_not_integer(#c_tuple{}) -> true;
-is_not_integer(#c_cons{}) -> true;
-is_not_integer(#c_map{}) -> true;
-is_not_integer(_) -> false.
-
-%% is_not_tuple(Core) -> true | false.
-%% Returns true if Core is definitely not a tuple.
-
-is_not_tuple(#c_literal{val=Val}) when not is_tuple(Val) -> true;
-is_not_tuple(#c_cons{}) -> true;
-is_not_tuple(#c_map{}) -> true;
-is_not_tuple(_) -> false.
-
%% eval_setelement(Call, Pos, Tuple, NewVal) -> Core.
%% Evaluates setelement/3 if position Pos is an integer
%% and the shape of the tuple Tuple is known.
@@ -1492,7 +1109,7 @@ clause(#c_clause{pats=Ps0,guard=G0,body=B0}=Cl, Cexpr, Ctxt, Sub0) ->
let_substs(Vs0, As0, Sub0) ->
{Vs1,Sub1} = pattern_list(Vs0, Sub0),
{Vs2,As1,Ss} = let_substs_1(Vs1, As0, Sub1),
- Sub2 = scope_add([V || #c_var{name=V} <- Vs2], Sub1),
+ Sub2 = sub_add_scope([V || #c_var{name=V} <- Vs2], Sub1),
{Vs2,As1,
foldl(fun ({V,S}, Sub) -> sub_set_name(V, S, Sub) end, Sub2, Ss)}.
@@ -1527,7 +1144,7 @@ pattern(#c_var{}=Pat, Isub, Osub) ->
true ->
V1 = make_var_name(),
Pat1 = #c_var{name=V1},
- {Pat1,sub_set_var(Pat, Pat1, scope_add([V1], Osub))};
+ {Pat1,sub_set_var(Pat, Pat1, sub_add_scope([V1], Osub))};
false ->
{Pat,sub_del_var(Pat, Osub)}
end;
@@ -1597,6 +1214,7 @@ is_subst(_) -> false.
%% sub_del_var(Var, #sub{}) -> #sub{}.
%% sub_subst_var(Var, Value, #sub{}) -> [{Name,Value}].
%% sub_is_val(Var, #sub{}) -> boolean().
+%% sub_add_scope([Var], #sub{}) -> #sub{}
%% sub_subst_scope(#sub{}) -> #sub{}
%%
%% We use the variable name as key so as not to have problems with
@@ -1607,9 +1225,10 @@ is_subst(_) -> false.
%% In addition to the list of substitutions, we also keep track of
%% all variables currently live (the scope).
%%
-%% sub_subst_scope/1 adds dummy substitutions for all variables
-%% in the scope in order to force renaming if variables in the
-%% scope occurs as pattern variables.
+%% sub_add_scope/2 adds variables to the scope. sub_subst_scope/1
+%% adds dummy substitutions for all variables in the scope in order
+%% to force renaming if variables in the scope occur as pattern
+%% variables.
sub_new() -> #sub{v=orddict:new(),s=gb_trees:empty(),t=[]}.
@@ -1649,6 +1268,12 @@ sub_subst_var(#c_var{name=V}, Val, #sub{v=S0}) ->
%% Fold chained substitutions.
[{V,Val}] ++ [ {K,Val} || {K,#c_var{name=V1}} <- S0, V1 =:= V].
+sub_add_scope(Vs, #sub{s=Scope0}=Sub) ->
+ Scope = foldl(fun(V, S) when is_integer(V); is_atom(V) ->
+ gb_sets:add(V, S)
+ end, Scope0, Vs),
+ Sub#sub{s=Scope}.
+
sub_subst_scope(#sub{v=S0,s=Scope}=Sub) ->
S = [{-1,#c_var{name=Sv}} || Sv <- gb_sets:to_list(Scope)]++S0,
Sub#sub{v=S}.
@@ -1696,7 +1321,7 @@ clauses(E, [C0|Cs], Ctxt, Sub, LitExpr) ->
{yes,yes} ->
case LitExpr of
false ->
- Line = get_line(core_lib:get_anno(C1)),
+ Line = get_line(cerl:get_ann(C1)),
shadow_warning(Cs, Line);
true ->
%% If the case expression is a literal,
@@ -1930,7 +1555,7 @@ opt_bool_case_guard(#c_case{arg=Arg,clauses=Cs0}=Case) ->
Case;
true ->
Cs = opt_bool_case_guard(Arg, Cs0),
- Case#c_case{arg=#c_values{anno=core_lib:get_anno(Arg),es=[]},
+ Case#c_case{arg=#c_values{anno=cerl:get_ann(Arg),es=[]},
clauses=Cs}
end.
@@ -1978,6 +1603,7 @@ eval_case(#c_case{arg=E,clauses=[#c_clause{pats=Ps0,
%% is correct, the clause will always match at run-time.
Case;
{true,Bs} ->
+ eval_case_warn(B),
{Ps,As} = unzip(Bs),
InnerLet = cerl:c_let(Ps, core_lib:make_values(As), B),
Let = cerl:c_let(Vs, E, InnerLet),
@@ -1985,6 +1611,19 @@ eval_case(#c_case{arg=E,clauses=[#c_clause{pats=Ps0,
end;
eval_case(Case, _) -> Case.
+eval_case_warn(#c_primop{anno=Anno,
+ name=#c_literal{val=match_fail},
+ args=[#c_literal{val=Reason}]}=Core)
+ when is_atom(Reason) ->
+ case member(eval_failure, Anno) of
+ false ->
+ ok;
+ true ->
+ %% Example: M = not_map, M#{k:=v}
+ add_warning(Core, {eval_failure,Reason})
+ end;
+eval_case_warn(_) -> ok.
+
%% case_opt(CaseArg, [Clause]) -> {CaseArg,[Clause]}.
%% Try and optimise a case by avoiding building tuples or lists
%% in the case expression. Instead combine the variable parts
@@ -2041,182 +1680,259 @@ case_opt_args([], Cs, _Sub, _LitExpr, Acc) ->
%% or to remove a literal argument.
%%
case_opt_arg(E0, Sub, Cs, LitExpr) ->
- E = maybe_replace_var(E0, Sub),
- case cerl:is_data(E) of
+ case cerl:is_c_var(E0) of
false ->
- {error,Cs};
+ case_opt_arg_1(E0, Cs, LitExpr);
true ->
+ case case_will_var_match(Cs) of
+ true ->
+ %% All clauses will match a variable in the
+ %% current position. Don't expand this variable
+ %% (that can only make the code worse).
+ {error,Cs};
+ false ->
+ %% If possible, expand this variable to a previously
+ %% matched term.
+ E = case_expand_var(E0, Sub),
+ case_opt_arg_1(E, Cs, LitExpr)
+ end
+ end.
+
+case_opt_arg_1(E0, Cs0, LitExpr) ->
+ case cerl:is_data(E0) of
+ false ->
+ {error,Cs0};
+ true ->
+ E = case_opt_compiler_generated(E0),
+ Cs = case_opt_nomatch(E, Cs0, LitExpr),
case cerl:data_type(E) of
{atomic,_} ->
- case_opt_lit(E, Cs, LitExpr);
+ case_opt_lit(E, Cs);
_ ->
- case_opt_data(E, Cs, LitExpr)
+ case_opt_data(E, Cs)
end
end.
-%% maybe_replace_var(Expr0, Sub) -> Expr
+%% case_will_var_match([Clause]) -> true | false.
+%% Return true if all clauses will match a variable in the
+%% current position.
+%%
+case_will_var_match(Cs) ->
+ all(fun({[P|_],_,_,_}) ->
+ case cerl_clauses:match(P, any) of
+ {true,_} -> true;
+ _ -> false
+ end
+ end, Cs).
+
+
+%% case_opt_compiler_generated(Core) -> Core'
+%% Mark Core expressions as compiler generated to ensure that
+%% no warnings are generated if they turn out to be unused.
+%% To make pretty-printed Core Erlang easier to read, don't mark
+%% constructs that can't cause warnings to be emitted.
+%%
+case_opt_compiler_generated(Core) ->
+ F = fun(C) ->
+ case cerl:type(C) of
+ alias -> C;
+ var -> C;
+ _ -> cerl:set_ann(C, [compiler_generated])
+ end
+ end,
+ cerl_trees:map(F, Core).
+
+
+%% case_expand_var(Expr0, Sub) -> Expr
%% If Expr0 is a variable that has been previously matched and
%% is known to be a tuple, return the tuple instead. Otherwise
%% return Expr0 unchanged.
%%
-maybe_replace_var(E, Sub) ->
- case cerl:is_c_var(E) of
- false -> E;
- true -> maybe_replace_var_1(E, Sub)
- end.
-
-maybe_replace_var_1(E, #sub{t=Tdb}) ->
+case_expand_var(E, #sub{t=Tdb}) ->
case orddict:find(cerl:var_name(E), Tdb) of
{ok,T0} ->
case cerl:is_c_tuple(T0) of
false ->
E;
true ->
- cerl_trees:map(fun(C) ->
- case cerl:is_c_alias(C) of
- false -> C;
- true -> cerl:alias_pat(C)
- end
- end, T0)
+ %% The pattern was a tuple. Now we must make sure
+ %% that the elements of the tuple are suitable. In
+ %% particular, we don't want binary or map
+ %% construction here, since that means that the
+ %% binary or map will be constructed in the 'case'
+ %% argument. That is wasteful for binaries. Even
+ %% worse is that any map pattern that uses the ':='
+ %% operator will fail when used in map
+ %% construction (only the '=>' operator is allowed
+ %% when constructing a map from scratch).
+ try
+ cerl_trees:map(fun coerce_to_data/1, T0)
+ catch
+ throw:impossible ->
+ %% Something unsuitable was found (map or
+ %% or binary). Keep the variable.
+ E
+ end
end;
error ->
E
end.
-%% case_opt_lit(Literal, Clauses0, LitExpr) ->
-%% {ok,[],Clauses} | error
-%% The current part of the case expression is a literal. That
-%% means that we will know at compile-time whether a clause
-%% will match, and we can remove the corresponding pattern from
-%% each clause.
-%%
-%% The only complication is if the literal is a binary. Binary
-%% pattern matching is tricky, so we will give up in that case.
+%% coerce_to_data(Core) -> Core'
+%% Coerce an element originally from a pattern to a data item or a
+%% variable. Throw an 'impossible' exception if non-data Core Erlang
+%% terms such as binary construction or map construction are
+%% encountered.
-case_opt_lit(Lit, Cs0, LitExpr) ->
- Cs1 = case_opt_lit_1(Lit, Cs0, LitExpr),
- try case_opt_lit_2(Lit, Cs1) of
- Cs ->
- {ok,[],Cs}
- catch
- throw:impossible ->
- {error,Cs1}
+coerce_to_data(C) ->
+ case cerl:is_c_alias(C) of
+ false ->
+ case cerl:is_data(C) orelse cerl:is_c_var(C) of
+ true -> C;
+ false -> throw(impossible)
+ end;
+ true ->
+ coerce_to_data(cerl:alias_pat(C))
end.
-case_opt_lit_1(E, [{[P|_],C,_,_}=Current|Cs], LitExpr) ->
+%% case_opt_nomatch(E, Clauses, LitExpr) -> Clauses'
+%% Remove all clauses that cannot possibly match.
+
+case_opt_nomatch(E, [{[P|_],C,_,_}=Current|Cs], LitExpr) ->
case cerl_clauses:match(P, E) of
none ->
- %% The pattern will not match the literal. Remove the clause.
- %% Unless the entire case expression is a literal, also
- %% emit a warning.
+ %% The pattern will not match the case expression. Remove
+ %% the clause. Unless the entire case expression is a
+ %% literal, also emit a warning.
case LitExpr of
false -> add_warning(C, nomatch_clause_type);
true -> ok
end,
- case_opt_lit_1(E, Cs, LitExpr);
+ case_opt_nomatch(E, Cs, LitExpr);
_ ->
- [Current|case_opt_lit_1(E, Cs, LitExpr)]
+ [Current|case_opt_nomatch(E, Cs, LitExpr)]
end;
-case_opt_lit_1(_, [], _) -> [].
+case_opt_nomatch(_, [], _) -> [].
+
+%% case_opt_lit(Literal, Clauses0) -> {ok,[],Clauses} | error
+%% The current part of the case expression is a literal. That
+%% means that we will know at compile-time whether a clause
+%% will match, and we can remove the corresponding pattern from
+%% each clause.
+%%
+%% The only complication is if the literal is a binary or map.
+%% In general, it is difficult to know whether a binary or
+%% map pattern will match, so we give up in that case.
+
+case_opt_lit(Lit, Cs0) ->
+ try case_opt_lit_1(Lit, Cs0) of
+ Cs ->
+ {ok,[],Cs}
+ catch
+ throw:impossible ->
+ {error,Cs0}
+ end.
-case_opt_lit_2(E, [{[P|Ps],C,PsAcc,Bs0}|Cs]) ->
- %% Non-matching clauses have already been removed in case_opt_lit_1/3.
+case_opt_lit_1(E, [{[P|Ps],C,PsAcc,Bs0}|Cs]) ->
+ %% Non-matching clauses have already been removed
+ %% in case_opt_nomatch/3.
case cerl_clauses:match(P, E) of
{true,Bs} ->
%% The pattern matches the literal. Remove the pattern
%% and update the bindings.
- [{Ps,C,PsAcc,Bs++Bs0}|case_opt_lit_2(E, Cs)];
+ [{Ps,C,PsAcc,Bs++Bs0}|case_opt_lit_1(E, Cs)];
{false,_} ->
%% Binary literal and pattern. We are not sure whether
%% the pattern will match.
throw(impossible)
end;
-case_opt_lit_2(_, []) -> [].
+case_opt_lit_1(_, []) -> [].
%% case_opt_data(Expr, Clauses0) -> {ok,Exprs,Clauses}
+%% The case expression is a non-atomic data constructor (cons
+%% or tuple). We can know at compile time whether each clause
+%% will match, and we can delay the building of the data to
+%% the clauses where it is actually needed.
-case_opt_data(E, Cs0, LitExpr) ->
+case_opt_data(E, Cs0) ->
Es = cerl:data_es(E),
- Cs = case_opt_data_1(Cs0, Es,
- {cerl:data_type(E),cerl:data_arity(E)},
- LitExpr),
- {ok,Es,Cs}.
-
-case_opt_data_1([{[P|Ps0],C,PsAcc,Bs0}|Cs], Es, TypeSig, LitExpr) ->
- case case_data_pat(P, TypeSig) of
- {ok,Ps1,Bs1} ->
- [{Ps1++Ps0,C,PsAcc,Bs1++Bs0}|
- case_opt_data_1(Cs, Es, TypeSig,LitExpr)];
- error ->
- case LitExpr of
- false -> add_warning(C, nomatch_clause_type);
- true -> ok
- end,
- case_opt_data_1(Cs, Es, TypeSig, LitExpr)
- end;
-case_opt_data_1([], _, _, _) -> [].
-
-%% case_data_pat(Pattern, Type, Arity) -> {ok,[Pattern],[{AliasVar,Pat}]} | error.
-
-case_data_pat(P, TypeSig) ->
- case cerl:is_data(P) of
- false ->
- case_data_pat_var(P, TypeSig);
- true ->
- case {cerl:data_type(P),cerl:data_arity(P)} of
- TypeSig ->
- {ok,cerl:data_es(P),[]};
- {_,_} ->
- error
- end
+ TypeSig = {cerl:data_type(E),cerl:data_arity(E)},
+ try case_opt_data_1(Cs0, Es, TypeSig) of
+ Cs ->
+ {ok,Es,Cs}
+ catch
+ throw:impossible ->
+ %% The pattern contained a binary or map.
+ {error,Cs0}
end.
-%% case_data_pat_var(Pattern, {DataType,ArityType}) ->
-%% {ok,[Pattern],[{AliasVar,Pat}]}
+case_opt_data_1([{[P0|Ps0],C,PsAcc,Bs0}|Cs], Es, TypeSig) ->
+ P = case_opt_compiler_generated(P0),
+ BindTo = #c_var{name=dummy},
+ {Ps1,[{BindTo,_}|Bs1]} = case_data_pat_alias(P, BindTo, TypeSig, []),
+ [{Ps1++Ps0,C,PsAcc,Bs1++Bs0}|case_opt_data_1(Cs, Es, TypeSig)];
+case_opt_data_1([], _, _) -> [].
-case_data_pat_var(P, {Type,Arity}=TypeSig) ->
- %% If the entire case statement is evaluated in an effect
- %% context (e.g. "case {A,B} of ... end, ok"), there will
- %% be a warning that a term is constructed but never used.
- %% To avoid that warning, we must annotate the data
- %% constructor as compiler generated.
- Ann = [compiler_generated|cerl:get_ann(P)],
+case_data_pat_alias(P, BindTo0, TypeSig, Bs0) ->
case cerl:type(P) of
- var ->
- Vars = make_vars(cerl:get_ann(P), Arity),
- {ok,Vars,[{P,cerl:ann_make_data(Ann, Type, Vars)}]};
alias ->
- V = cerl:alias_var(P),
- Apat = cerl:alias_pat(P),
- case case_data_pat(Apat, TypeSig) of
- {ok,Ps,Bs} ->
- {ok,Ps,[{V,cerl:ann_make_data(Ann, Type, unalias_pat_list(Ps))}|Bs]};
- error ->
- error
- end;
+ %% Recursively handle the pattern and bind to
+ %% the alias variable.
+ BindTo = cerl:alias_var(P),
+ Apat0 = cerl:alias_pat(P),
+ Ann = [compiler_generated],
+ Apat = cerl:set_ann(Apat0, Ann),
+ {Ps,Bs} = case_data_pat_alias(Apat, BindTo, TypeSig, Bs0),
+ {Ps,[{BindTo0,BindTo}|Bs]};
+ var ->
+ %% Here we will need to actually build the data and bind
+ %% it to the variable.
+ {Type,Arity} = TypeSig,
+ Vars = make_vars([], Arity),
+ Ann = [compiler_generated],
+ Data = cerl:ann_make_data(Ann, Type, Vars),
+ Bs = [{BindTo0,P},{P,Data}|Bs0],
+ {Vars,Bs};
_ ->
- error
+ %% Since case_opt_nomatch/3 has removed all clauses that
+ %% cannot match, we KNOW that this clause must match and
+ %% that the pattern must be a data constructor.
+ %% Here we must build the data and bind it to the variable.
+ {Type,_} = TypeSig,
+ DataEs = cerl:data_es(P),
+ Vars = pat_to_expr_list(DataEs),
+ Ann = [compiler_generated],
+ Data = cerl:ann_make_data(Ann, Type, Vars),
+ {DataEs,[{BindTo0,Data}]}
end.
-%% unalias_pat(Pattern) -> Pattern.
-%% Remove all the aliases in a pattern but using the alias variables
-%% instead of the values. We KNOW they will be bound.
+%% pat_to_expr(Pattern) -> Expression.
+%% Convert a pattern to an expression if possible. We KNOW that
+%% all variables in the pattern will be bound.
+%%
+%% Throw an 'impossible' exception if a map or (non-literal)
+%% binary is encountered. Trying to use a map pattern as an
+%% expression is incorrect, while rebuilding a potentially
+%% huge binary in an expression would be wasteful.
-unalias_pat(P) ->
- case cerl:is_c_alias(P) of
- true ->
+pat_to_expr(P) ->
+ case cerl:type(P) of
+ alias ->
cerl:alias_var(P);
- false ->
+ var ->
+ P;
+ _ ->
case cerl:is_data(P) of
false ->
- P;
+ %% Map or binary.
+ throw(impossible);
true ->
- Es = unalias_pat_list(cerl:data_es(P)),
+ Es = pat_to_expr_list(cerl:data_es(P)),
cerl:update_data(P, cerl:data_type(P), Es)
end
end.
-unalias_pat_list(Ps) -> [unalias_pat(P) || P <- Ps].
+pat_to_expr_list(Ps) -> [pat_to_expr(P) || P <- Ps].
make_vars(A, Max) ->
make_vars(A, 1, Max).
@@ -2234,18 +1950,11 @@ make_var_name() ->
list_to_atom("fol"++integer_to_list(N)).
letify(Bs, Body) ->
+ Ann = cerl:get_ann(Body),
foldr(fun({V,Val}, B) ->
- letify(V, Val, B)
+ cerl:ann_c_let(Ann, [V], Val, B)
end, Body, Bs).
-letify(#c_var{name=Vname}=Var, Val, Body) ->
- case core_lib:is_var_used(Vname, Body) of
- true ->
- A = element(2, Body),
- #c_let{anno=A,vars=[Var],arg=Val,body=Body};
- false -> Body
- end.
-
%% opt_case_in_let(LetExpr) -> LetExpr'
opt_case_in_let(#c_let{vars=Vs,arg=Arg,body=B}=Let, Sub) ->
@@ -2334,11 +2043,8 @@ is_bool_expr(#c_let{vars=[V],arg=Arg,body=B}, Sub0) ->
is_bool_expr(#c_let{body=B}, Sub) ->
%% Binding of multiple variables.
is_bool_expr(B, Sub);
-is_bool_expr(#c_literal{val=Bool}, _) when is_boolean(Bool) ->
- true;
-is_bool_expr(#c_var{name=V}, Sub) ->
- is_boolean_type(V, Sub);
-is_bool_expr(_, _) -> false.
+is_bool_expr(C, Sub) ->
+ is_boolean_type(C, Sub) =:= yes.
is_bool_expr_list([C|Cs], Sub) ->
is_bool_expr(C, Sub) andalso is_bool_expr_list(Cs, Sub);
@@ -2552,12 +2258,6 @@ move_let_into_expr(_Let, _Expr, _Sub) -> impossible.
is_failing_clause(#c_clause{body=B}) ->
will_fail(B).
-scope_add(Vs, #sub{s=Scope0}=Sub) ->
- Scope = foldl(fun(V, S) when is_integer(V); is_atom(V) ->
- gb_sets:add(V, S)
- end, Scope0, Vs),
- Sub#sub{s=Scope}.
-
%% opt_simple_let(#c_let{}, Context, Sub) -> CoreTerm
%% Optimize a let construct that does not contain any lets in
%% its argument.
@@ -2586,31 +2286,7 @@ opt_simple_let_1(#c_let{vars=Vs0,body=B0}=Let, Arg0, Ctxt, Sub0) ->
Arg = core_lib:make_values(Args),
opt_simple_let_2(Let, Vs, Arg, B, Ctxt, Sub1).
-opt_simple_let_2(Let0, Vs0, Arg0, Body0, effect, Sub) ->
- case {Vs0,Arg0,Body0} of
- {[],#c_values{es=[]},Body} ->
- %% No variables left (because of substitutions).
- Body;
- {[_|_],Arg,#c_literal{}} ->
- %% The body is a literal. That means that we can ignore
- %% it and that the return value is Arg revisited in
- %% effect context.
- body(Arg, effect, sub_new_preserve_types(Sub));
- {Vs,Arg,Body} ->
- %% Since we are in effect context, there is a chance
- %% that the body no longer references the variables.
- %% In that case we can construct a sequence and visit
- %% that in effect context:
- %% let <Var> = Arg in BodyWithoutVar ==> seq Arg BodyWithoutVar
- case is_any_var_used(Vs, Body) of
- false ->
- expr(#c_seq{arg=Arg,body=Body}, effect, sub_new_preserve_types(Sub));
- true ->
- Let = Let0#c_let{vars=Vs,arg=Arg,body=Body},
- opt_case_in_let_arg(opt_case_in_let(Let, Sub), effect, Sub)
- end
- end;
-opt_simple_let_2(Let, Vs0, Arg0, Body, value, Sub) ->
+opt_simple_let_2(Let0, Vs0, Arg0, Body, Ctxt, Sub) ->
case {Vs0,Arg0,Body} of
{[#c_var{name=N1}],Arg,#c_var{name=N2}} ->
case N1 =:= N2 of
@@ -2619,19 +2295,38 @@ opt_simple_let_2(Let, Vs0, Arg0, Body, value, Sub) ->
Arg;
false ->
%% let <Var> = Arg in <OtherVar> ==> seq Arg OtherVar
- expr(#c_seq{arg=Arg,body=Body}, value, sub_new_preserve_types(Sub))
+ expr(#c_seq{arg=Arg,body=Body}, Ctxt,
+ sub_new_preserve_types(Sub))
end;
{[],#c_values{es=[]},_} ->
%% No variables left.
Body;
{_,Arg,#c_literal{}} ->
- %% The variable is not used in the body. The argument
- %% can be evaluated in effect context to simplify it.
- expr(#c_seq{arg=Arg,body=Body}, value, sub_new_preserve_types(Sub));
+ E = case Ctxt of
+ effect ->
+ %% Throw away the literal body.
+ Arg;
+ value ->
+ %% Since the variable is not used in the body, we
+ %% can rewrite the let to a sequence.
+ %% let <Var> = Arg in Literal ==> seq Arg Literal
+ #c_seq{arg=Arg,body=Body}
+ end,
+ expr(E, Ctxt, sub_new_preserve_types(Sub));
{Vs,Arg,Body} ->
- opt_case_in_let_arg(
- opt_case_in_let(Let#c_let{vars=Vs,arg=Arg,body=Body}, Sub),
- value, Sub)
+ %% If none of the variables are used in the body, we can
+ %% rewrite the let to a sequence:
+ %% let <Var> = Arg in BodyWithoutVar ==>
+ %% seq Arg BodyWithoutVar
+ case is_any_var_used(Vs, Body) of
+ false ->
+ expr(#c_seq{arg=Arg,body=Body}, Ctxt,
+ sub_new_preserve_types(Sub));
+ true ->
+ Let1 = Let0#c_let{vars=Vs,arg=Arg,body=Body},
+ Let2 = opt_case_in_let(Let1, Sub),
+ opt_case_in_let_arg(Let2, Ctxt, Sub)
+ end
end.
move_case_into_arg(#c_case{arg=#c_let{vars=OuterVars0,arg=OuterArg,
@@ -2754,12 +2449,61 @@ is_any_var_used([#c_var{name=V}|Vs], Expr) ->
end;
is_any_var_used([], _) -> false.
-is_boolean_type(V, #sub{t=Tdb}) ->
+%%%
+%%% Retrieving information about types.
+%%%
+
+-spec get_type(cerl:cerl(), #sub{}) -> type_info() | 'none'.
+
+get_type(#c_var{name=V}, #sub{t=Tdb}) ->
case orddict:find(V, Tdb) of
- {ok,bool} -> true;
- _ -> false
+ {ok,Type} -> Type;
+ error -> none
+ end;
+get_type(C, _) ->
+ case cerl:type(C) of
+ binary -> C;
+ map -> C;
+ _ ->
+ case cerl:is_data(C) of
+ true -> C;
+ false -> none
+ end
end.
+-spec is_boolean_type(cerl:cerl(), sub()) -> yes_no_maybe().
+
+is_boolean_type(Var, Sub) ->
+ case get_type(Var, Sub) of
+ none ->
+ maybe;
+ bool ->
+ yes;
+ C ->
+ B = cerl:is_c_atom(C) andalso
+ is_boolean(cerl:atom_val(C)),
+ yes_no(B)
+ end.
+
+-spec is_int_type(cerl:cerl(), sub()) -> yes_no_maybe().
+
+is_int_type(Var, Sub) ->
+ case get_type(Var, Sub) of
+ none -> maybe;
+ C -> yes_no(cerl:is_c_int(C))
+ end.
+
+-spec is_tuple_type(cerl:cerl(), sub()) -> yes_no_maybe().
+
+is_tuple_type(Var, Sub) ->
+ case get_type(Var, Sub) of
+ none -> maybe;
+ C -> yes_no(cerl:is_c_tuple(C))
+ end.
+
+yes_no(true) -> yes;
+yes_no(false) -> no.
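%% The yes/no/maybe answers in miniature: with partial type information,
%% "don't know" must stay distinct from "no", or a test like `X =:= true'
%% could not be simplified safely (compare eval_rel_op/4 above). The
%% {lit,Value}/{var,Name} mini-terms and the module name are stand-ins
%% invented for this sketch, not cerl nodes or the real type database.
-module(tri_sketch).
-export([simplify_eq_true/1]).

simplify_eq_true(Term) ->
    case is_bool(Term) of
        yes     -> Term;                        %% the test is redundant
        no      -> {lit, false};                %% the test can never succeed
        'maybe' -> {'=:=', Term, {lit, true}}   %% keep the original test
    end.

is_bool({lit, V}) -> yes_no(is_boolean(V));
is_bool({var, _}) -> 'maybe'.                   %% no type information here

yes_no(true) -> yes;
yes_no(false) -> no.

%% simplify_eq_true({lit,ok})  -> {lit,false}
%% simplify_eq_true({var,'X'}) -> {'=:=',{var,'X'},{lit,true}}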
+
%% update_types(Expr, Pattern, Sub) -> Sub'
%% Update the type database.
update_types(Expr, Pat, #sub{t=Tdb0}=Sub) ->
@@ -3081,11 +2825,11 @@ add_bin_opt_info(Core, Term) ->
end.
add_warning(Core, Term) ->
- case is_compiler_generated(Core) of
+ case suppress_warning(Core) of
true ->
ok;
false ->
- Anno = core_lib:get_anno(Core),
+ Anno = cerl:get_ann(Core),
Line = get_line(Anno),
File = get_file(Anno),
Key = {?MODULE,warnings},
@@ -3106,9 +2850,17 @@ get_file([{file,File}|_]) -> File;
get_file([_|T]) -> get_file(T);
get_file([]) -> "no_file". % should not happen
+suppress_warning(Core) ->
+ is_compiler_generated(Core) orelse
+ is_result_unwanted(Core).
+
is_compiler_generated(Core) ->
- Anno = core_lib:get_anno(Core),
- member(compiler_generated, Anno).
+ Ann = cerl:get_ann(Core),
+ member(compiler_generated, Ann).
+
+is_result_unwanted(Core) ->
+ Ann = cerl:get_ann(Core),
+ member(result_not_wanted, Ann).
get_warnings() ->
ordsets:from_list((erase({?MODULE,warnings}))).
diff --git a/lib/compiler/src/sys_core_fold_lists.erl b/lib/compiler/src/sys_core_fold_lists.erl
new file mode 100644
index 0000000000..49dc59052a
--- /dev/null
+++ b/lib/compiler/src/sys_core_fold_lists.erl
@@ -0,0 +1,386 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%% Purpose : Inline higher-order list functions from the lists module.
+
+-module(sys_core_fold_lists).
+
+-export([call/4]).
+
+-include("core_parse.hrl").
+
+%% We inline some very common higher-order list operations.
+%% We use the same evaluation order as the library functions.
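+%%
+%% As a sketch (the rewrite below builds the Core Erlang records
+%% directly), a call such as lists:all(F, L) becomes roughly the
+%% following, where AllLoop is just an illustrative name:
+%%
+%%     (fun AllLoop([X|Xs]) ->
+%%              case F(X) of
+%%                  true -> AllLoop(Xs);
+%%                  false -> false
+%%              end;
+%%          AllLoop([]) when is_function(F, 1) -> true
+%%      end)(L)
+%%
+%% plus clauses that raise case_clause/function_clause errors,
+%% mirroring the library function.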
+
+-spec call(cerl:c_call(), atom(), atom(), [cerl:cerl()]) ->
+ 'none' | cerl:cerl().
+
+call(#c_call{anno=Anno}, lists, all, [Arg1,Arg2]) ->
+ Loop = #c_var{name={'lists^all',1}},
+ F = #c_var{name='F'},
+ Xs = #c_var{name='Xs'},
+ X = #c_var{name='X'},
+ Err1 = #c_tuple{es=[#c_literal{val='case_clause'}, X]},
+ CC1 = #c_clause{pats=[#c_literal{val=true}], guard=#c_literal{val=true},
+ body=#c_apply{anno=Anno, op=Loop, args=[Xs]}},
+ CC2 = #c_clause{pats=[#c_literal{val=false}], guard=#c_literal{val=true},
+ body=#c_literal{val=false}},
+ CC3 = #c_clause{pats=[X], guard=#c_literal{val=true},
+ body=match_fail(Anno, Err1)},
+ C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
+ body=#c_case{arg=#c_apply{anno=Anno, op=F, args=[X]},
+ clauses = [CC1, CC2, CC3]}},
+ C2 = #c_clause{pats=[#c_literal{val=[]}],
+ guard=#c_call{module=#c_literal{val=erlang},
+ name=#c_literal{val=is_function},
+ args=[F, #c_literal{val=1}]},
+ body=#c_literal{val=true}},
+ Err2 = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]},
+ C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
+ body=match_fail([{function_name,{'lists^all',1}}|Anno], Err2)},
+ Fun = #c_fun{vars=[Xs],
+ body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
+ L = #c_var{name='L'},
+ #c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]},
+ body=#c_letrec{defs=[{Loop,Fun}],
+ body=#c_apply{anno=Anno, op=Loop, args=[L]}}};
+call(#c_call{anno=Anno}, lists, any, [Arg1,Arg2]) ->
+ Loop = #c_var{name={'lists^any',1}},
+ F = #c_var{name='F'},
+ Xs = #c_var{name='Xs'},
+ X = #c_var{name='X'},
+ Err1 = #c_tuple{es=[#c_literal{val='case_clause'}, X]},
+ CC1 = #c_clause{pats=[#c_literal{val=true}], guard=#c_literal{val=true},
+ body=#c_literal{val=true}},
+ CC2 = #c_clause{pats=[#c_literal{val=false}], guard=#c_literal{val=true},
+ body=#c_apply{anno=Anno, op=Loop, args=[Xs]}},
+ CC3 = #c_clause{pats=[X], guard=#c_literal{val=true},
+ body=match_fail(Anno, Err1)},
+ C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
+ body=#c_case{arg=#c_apply{anno=Anno, op=F, args=[X]},
+ clauses = [CC1, CC2, CC3]}},
+ C2 = #c_clause{pats=[#c_literal{val=[]}],
+ guard=#c_call{module=#c_literal{val=erlang},
+ name=#c_literal{val=is_function},
+ args=[F, #c_literal{val=1}]},
+ body=#c_literal{val=false}},
+ Err2 = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]},
+ C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
+ body=match_fail([{function_name,{'lists^any',1}}|Anno], Err2)},
+ Fun = #c_fun{vars=[Xs],
+ body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
+ L = #c_var{name='L'},
+ #c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]},
+ body=#c_letrec{defs=[{Loop,Fun}],
+ body=#c_apply{anno=Anno, op=Loop, args=[L]}}};
+call(#c_call{anno=Anno}, lists, foreach, [Arg1,Arg2]) ->
+ Loop = #c_var{name={'lists^foreach',1}},
+ F = #c_var{name='F'},
+ Xs = #c_var{name='Xs'},
+ X = #c_var{name='X'},
+ C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
+ body=#c_seq{arg=#c_apply{anno=Anno, op=F, args=[X]},
+ body=#c_apply{anno=Anno, op=Loop, args=[Xs]}}},
+ C2 = #c_clause{pats=[#c_literal{val=[]}],
+ guard=#c_call{module=#c_literal{val=erlang},
+ name=#c_literal{val=is_function},
+ args=[F, #c_literal{val=1}]},
+ body=#c_literal{val=ok}},
+ Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]},
+ C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
+ body=match_fail([{function_name,{'lists^foreach',1}}|Anno], Err)},
+ Fun = #c_fun{vars=[Xs],
+ body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
+ L = #c_var{name='L'},
+ #c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]},
+ body=#c_letrec{defs=[{Loop,Fun}],
+ body=#c_apply{anno=Anno, op=Loop, args=[L]}}};
+call(#c_call{anno=Anno}, lists, map, [Arg1,Arg2]) ->
+ Loop = #c_var{name={'lists^map',1}},
+ F = #c_var{name='F'},
+ Xs = #c_var{name='Xs'},
+ X = #c_var{name='X'},
+ H = #c_var{name='H'},
+ C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
+ body=#c_let{vars=[H], arg=#c_apply{anno=Anno,
+ op=F,
+ args=[X]},
+ body=#c_cons{hd=H,
+ anno=[compiler_generated],
+ tl=#c_apply{anno=Anno,
+ op=Loop,
+ args=[Xs]}}}},
+ C2 = #c_clause{pats=[#c_literal{val=[]}],
+ guard=#c_call{module=#c_literal{val=erlang},
+ name=#c_literal{val=is_function},
+ args=[F, #c_literal{val=1}]},
+ body=#c_literal{val=[]}},
+ Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]},
+ C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
+ body=match_fail([{function_name,{'lists^map',1}}|Anno], Err)},
+ Fun = #c_fun{vars=[Xs],
+ body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
+ L = #c_var{name='L'},
+ #c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]},
+ body=#c_letrec{defs=[{Loop,Fun}],
+ body=#c_apply{anno=Anno, op=Loop, args=[L]}}};
+call(#c_call{anno=Anno}, lists, flatmap, [Arg1,Arg2]) ->
+ Loop = #c_var{name={'lists^flatmap',1}},
+ F = #c_var{name='F'},
+ Xs = #c_var{name='Xs'},
+ X = #c_var{name='X'},
+ H = #c_var{name='H'},
+ C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
+ body=#c_let{vars=[H],
+ arg=#c_apply{anno=Anno, op=F, args=[X]},
+ body=#c_call{anno=[compiler_generated|Anno],
+ module=#c_literal{val=erlang},
+ name=#c_literal{val='++'},
+ args=[H,
+ #c_apply{anno=Anno,
+ op=Loop,
+ args=[Xs]}]}}},
+ C2 = #c_clause{pats=[#c_literal{val=[]}],
+ guard=#c_call{module=#c_literal{val=erlang},
+ name=#c_literal{val=is_function},
+ args=[F, #c_literal{val=1}]},
+ body=#c_literal{val=[]}},
+ Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]},
+ C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
+ body=match_fail([{function_name,{'lists^flatmap',1}}|Anno], Err)},
+ Fun = #c_fun{vars=[Xs],
+ body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
+ L = #c_var{name='L'},
+ #c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]},
+ body=#c_letrec{defs=[{Loop,Fun}],
+ body=#c_apply{anno=Anno, op=Loop, args=[L]}}};
+call(#c_call{anno=Anno}, lists, filter, [Arg1,Arg2]) ->
+ Loop = #c_var{name={'lists^filter',1}},
+ F = #c_var{name='F'},
+ Xs = #c_var{name='Xs'},
+ X = #c_var{name='X'},
+ B = #c_var{name='B'},
+ Err1 = #c_tuple{es=[#c_literal{val='case_clause'}, X]},
+ CC1 = #c_clause{pats=[#c_literal{val=true}], guard=#c_literal{val=true},
+ body=#c_cons{anno=[compiler_generated], hd=X, tl=Xs}},
+ CC2 = #c_clause{pats=[#c_literal{val=false}], guard=#c_literal{val=true},
+ body=Xs},
+ CC3 = #c_clause{pats=[X], guard=#c_literal{val=true},
+ body=match_fail(Anno, Err1)},
+ Case = #c_case{arg=B, clauses = [CC1, CC2, CC3]},
+ C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
+ body=#c_let{vars=[B],
+ arg=#c_apply{anno=Anno, op=F, args=[X]},
+ body=#c_let{vars=[Xs],
+ arg=#c_apply{anno=Anno,
+ op=Loop,
+ args=[Xs]},
+ body=Case}}},
+ C2 = #c_clause{pats=[#c_literal{val=[]}],
+ guard=#c_call{module=#c_literal{val=erlang},
+ name=#c_literal{val=is_function},
+ args=[F, #c_literal{val=1}]},
+ body=#c_literal{val=[]}},
+ Err2 = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]},
+ C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
+ body=match_fail([{function_name,{'lists^filter',1}}|Anno], Err2)},
+ Fun = #c_fun{vars=[Xs],
+ body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
+ L = #c_var{name='L'},
+ #c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]},
+ body=#c_letrec{defs=[{Loop,Fun}],
+ body=#c_apply{anno=Anno, op=Loop, args=[L]}}};
+call(#c_call{anno=Anno}, lists, foldl, [Arg1,Arg2,Arg3]) ->
+ Loop = #c_var{name={'lists^foldl',2}},
+ F = #c_var{name='F'},
+ Xs = #c_var{name='Xs'},
+ X = #c_var{name='X'},
+ A = #c_var{name='A'},
+ C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
+ body=#c_apply{anno=Anno,
+ op=Loop,
+ args=[Xs, #c_apply{anno=Anno,
+ op=F,
+ args=[X, A]}]}},
+ C2 = #c_clause{pats=[#c_literal{val=[]}],
+ guard=#c_call{module=#c_literal{val=erlang},
+ name=#c_literal{val=is_function},
+ args=[F, #c_literal{val=2}]},
+ body=A},
+ Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, A, Xs]},
+ C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
+ body=match_fail([{function_name,{'lists^foldl',2}}|Anno], Err)},
+ Fun = #c_fun{vars=[Xs, A],
+ body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
+ L = #c_var{name='L'},
+ #c_let{vars=[F, A, L], arg=#c_values{es=[Arg1, Arg2, Arg3]},
+ body=#c_letrec{defs=[{Loop,Fun}],
+ body=#c_apply{anno=Anno, op=Loop, args=[L, A]}}};
+call(#c_call{anno=Anno}, lists, foldr, [Arg1,Arg2,Arg3]) ->
+ Loop = #c_var{name={'lists^foldr',2}},
+ F = #c_var{name='F'},
+ Xs = #c_var{name='Xs'},
+ X = #c_var{name='X'},
+ A = #c_var{name='A'},
+ C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
+ body=#c_apply{anno=Anno,
+ op=F,
+ args=[X, #c_apply{anno=Anno,
+ op=Loop,
+ args=[Xs, A]}]}},
+ C2 = #c_clause{pats=[#c_literal{val=[]}],
+ guard=#c_call{module=#c_literal{val=erlang},
+ name=#c_literal{val=is_function},
+ args=[F, #c_literal{val=2}]},
+ body=A},
+ Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, A, Xs]},
+ C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
+ body=match_fail([{function_name,{'lists^foldr',2}}|Anno], Err)},
+ Fun = #c_fun{vars=[Xs, A],
+ body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
+ L = #c_var{name='L'},
+ #c_let{vars=[F, A, L], arg=#c_values{es=[Arg1, Arg2, Arg3]},
+ body=#c_letrec{defs=[{Loop,Fun}],
+ body=#c_apply{anno=Anno, op=Loop, args=[L, A]}}};
+call(#c_call{anno=Anno}, lists, mapfoldl, [Arg1,Arg2,Arg3]) ->
+ Loop = #c_var{name={'lists^mapfoldl',2}},
+ F = #c_var{name='F'},
+ Xs = #c_var{name='Xs'},
+ X = #c_var{name='X'},
+ Avar = #c_var{name='A'},
+ Match =
+ fun (A, P, E) ->
+ C1 = #c_clause{pats=[P], guard=#c_literal{val=true}, body=E},
+ Err = #c_tuple{es=[#c_literal{val='badmatch'}, X]},
+ C2 = #c_clause{pats=[X], guard=#c_literal{val=true},
+ body=match_fail(Anno, Err)},
+ #c_case{arg=A, clauses=[C1, C2]}
+ end,
+ C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
+ body=Match(#c_apply{anno=Anno, op=F, args=[X, Avar]},
+ #c_tuple{es=[X, Avar]},
+%%% Tuple passing version
+ Match(#c_apply{anno=Anno,
+ op=Loop,
+ args=[Xs, Avar]},
+ #c_tuple{es=[Xs, Avar]},
+ #c_tuple{anno=[compiler_generated],
+ es=[#c_cons{anno=[compiler_generated],
+ hd=X, tl=Xs},
+ Avar]})
+%%% Multiple-value version
+%%% #c_let{vars=[Xs,A],
+%%% %% The tuple here will be optimised
+%%% %% away later; no worries.
+%%% arg=#c_apply{op=Loop, args=[Xs, A]},
+%%% body=#c_values{es=[#c_cons{hd=X, tl=Xs},
+%%% A]}}
+ )},
+ C2 = #c_clause{pats=[#c_literal{val=[]}],
+ guard=#c_call{module=#c_literal{val=erlang},
+ name=#c_literal{val=is_function},
+ args=[F, #c_literal{val=2}]},
+%%% Tuple passing version
+ body=#c_tuple{anno=[compiler_generated],
+ es=[#c_literal{val=[]}, Avar]}},
+%%% Multiple-value version
+%%% body=#c_values{es=[#c_literal{val=[]}, A]}},
+ Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Avar, Xs]},
+ C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
+ body=match_fail([{function_name,{'lists^mapfoldl',2}}|Anno], Err)},
+ Fun = #c_fun{vars=[Xs, Avar],
+ body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
+ L = #c_var{name='L'},
+ #c_let{vars=[F, Avar, L], arg=#c_values{es=[Arg1, Arg2, Arg3]},
+ body=#c_letrec{defs=[{Loop,Fun}],
+%%% Tuple passing version
+ body=#c_apply{anno=Anno,
+ op=Loop,
+ args=[L, Avar]}}};
+%%% Multiple-value version
+%%% body=#c_let{vars=[Xs, A],
+%%% arg=#c_apply{op=Loop,
+%%% args=[L, A]},
+%%% body=#c_tuple{es=[Xs, A]}}}};
+call(#c_call{anno=Anno}, lists, mapfoldr, [Arg1,Arg2,Arg3]) ->
+ Loop = #c_var{name={'lists^mapfoldr',2}},
+ F = #c_var{name='F'},
+ Xs = #c_var{name='Xs'},
+ X = #c_var{name='X'},
+ Avar = #c_var{name='A'},
+ Match =
+ fun (A, P, E) ->
+ C1 = #c_clause{pats=[P], guard=#c_literal{val=true}, body=E},
+ Err = #c_tuple{es=[#c_literal{val='badmatch'}, X]},
+ C2 = #c_clause{pats=[X], guard=#c_literal{val=true},
+ body=match_fail(Anno, Err)},
+ #c_case{arg=A, clauses=[C1, C2]}
+ end,
+ C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
+%%% Tuple passing version
+ body=Match(#c_apply{anno=Anno,
+ op=Loop,
+ args=[Xs, Avar]},
+ #c_tuple{es=[Xs, Avar]},
+ Match(#c_apply{anno=Anno, op=F, args=[X, Avar]},
+ #c_tuple{es=[X, Avar]},
+ #c_tuple{anno=[compiler_generated],
+ es=[#c_cons{anno=[compiler_generated],
+ hd=X, tl=Xs}, Avar]}))
+%%% Multiple-value version
+%%% body=#c_let{vars=[Xs,A],
+%%% %% The tuple will be optimised away
+%%% arg=#c_apply{op=Loop, args=[Xs, A]},
+%%% body=Match(#c_apply{op=F, args=[X, A]},
+%%% #c_tuple{es=[X, A]},
+%%% #c_values{es=[#c_cons{hd=X, tl=Xs},
+%%% A]})}
+ },
+ C2 = #c_clause{pats=[#c_literal{val=[]}],
+ guard=#c_call{module=#c_literal{val=erlang},
+ name=#c_literal{val=is_function},
+ args=[F, #c_literal{val=2}]},
+%%% Tuple passing version
+ body=#c_tuple{anno=[compiler_generated],
+ es=[#c_literal{val=[]}, Avar]}},
+%%% Multiple-value version
+%%% body=#c_values{es=[#c_literal{val=[]}, A]}},
+ Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Avar, Xs]},
+ C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
+ body=match_fail([{function_name,{'lists^mapfoldr',2}}|Anno], Err)},
+ Fun = #c_fun{vars=[Xs, Avar],
+ body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
+ L = #c_var{name='L'},
+ #c_let{vars=[F, Avar, L], arg=#c_values{es=[Arg1, Arg2, Arg3]},
+ body=#c_letrec{defs=[{Loop,Fun}],
+%%% Tuple passing version
+ body=#c_apply{anno=Anno,
+ op=Loop,
+ args=[L, Avar]}}};
+%%% Multiple-value version
+%%% body=#c_let{vars=[Xs, A],
+%%% arg=#c_apply{op=Loop,
+%%% args=[L, A]},
+%%% body=#c_tuple{es=[Xs, A]}}}};
+call(_, _, _, _) ->
+ none.
+
+match_fail(Ann, Arg) ->
+ Name = cerl:abstract(match_fail),
+ Args = [Arg],
+ cerl:ann_c_primop(Ann, Name, Args).
diff --git a/lib/compiler/src/sys_core_inline.erl b/lib/compiler/src/sys_core_inline.erl
index 9f93acb666..1e3a735e9b 100644
--- a/lib/compiler/src/sys_core_inline.erl
+++ b/lib/compiler/src/sys_core_inline.erl
@@ -195,10 +195,10 @@ kill_id_anns(Body) ->
A = kill_id_anns_1(A0),
CFun#c_fun{anno=A};
(Expr) ->
- %% Mark everything as compiler generated to suppress
- %% bogus warnings.
- A = compiler_generated(core_lib:get_anno(Expr)),
- core_lib:set_anno(Expr, A)
+ %% Mark everything as compiler generated to
+ %% suppress bogus warnings.
+ A = compiler_generated(cerl:get_ann(Expr)),
+ cerl:set_ann(Expr, A)
end, Body).
kill_id_anns_1([{'id',_}|As]) ->
diff --git a/lib/compiler/src/v3_codegen.erl b/lib/compiler/src/v3_codegen.erl
index 8c1a0c08ac..cbe50b93b0 100644
--- a/lib/compiler/src/v3_codegen.erl
+++ b/lib/compiler/src/v3_codegen.erl
@@ -1523,9 +1523,11 @@ set_cg([{var,R}], {map,Op,Map,[{map_pair,{var,_}=K,V}]}, Le, Vdb, Bef,
List = [cg_reg_arg(K,Int0),cg_reg_arg(V,Int0)],
Live = max_reg(Bef#sr.reg),
- Int1 = Int0#sr{reg=put_reg(R, Int0#sr.reg)},
- Aft = clear_dead(Int1, Le#l.i, Vdb),
- Target = fetch_reg(R, Int1#sr.reg),
+
+ %% The target register can reuse one of the source registers.
+ Aft0 = clear_dead(Int0, Le#l.i, Vdb),
+ Aft = Aft0#sr{reg=put_reg(R, Aft0#sr.reg)},
+ Target = fetch_reg(R, Aft#sr.reg),
I = case Op of
assoc -> put_map_assoc;
@@ -1557,9 +1559,11 @@ set_cg([{var,R}], {map,Op,Map,Es}, Le, Vdb, Bef,
List = flatmap(fun({K,V}) -> [K,cg_reg_arg(V,Int0)] end, Pairs),
Live = max_reg(Bef#sr.reg),
- Int1 = Int0#sr{reg=put_reg(R, Int0#sr.reg)},
- Aft = clear_dead(Int1, Le#l.i, Vdb),
- Target = fetch_reg(R, Int1#sr.reg),
+
+ %% The target register can reuse one of the source registers.
+ Aft0 = clear_dead(Int0, Le#l.i, Vdb),
+ Aft = Aft0#sr{reg=put_reg(R, Aft0#sr.reg)},
+ Target = fetch_reg(R, Aft#sr.reg),
I = case Op of
assoc -> put_map_assoc;
diff --git a/lib/compiler/src/v3_core.erl b/lib/compiler/src/v3_core.erl
index 612660c2d6..3c19a209c0 100644
--- a/lib/compiler/src/v3_core.erl
+++ b/lib/compiler/src/v3_core.erl
@@ -66,6 +66,7 @@
%% match arguments are novars
%% case arguments are novars
%% receive timeouts are novars
+%% binaries and maps are novars
%% let/set arguments are expressions
%% fun is not a safe
@@ -105,7 +106,9 @@
-record(iset, {anno=#a{},var,arg}).
-record(itry, {anno=#a{},args,vars,body,evars,handler}).
-record(ifilter, {anno=#a{},arg}).
--record(igen, {anno=#a{},acc_pat,acc_guard,skip_pat,tail,tail_pat,arg}).
+-record(igen, {anno=#a{},ceps=[],acc_pat,acc_guard,
+ skip_pat,tail,tail_pat,arg}).
+-record(isimple, {anno=#a{},term :: cerl:cerl()}).
-type iapply() :: #iapply{}.
-type ibinary() :: #ibinary{}.
@@ -124,11 +127,12 @@
-type itry() :: #itry{}.
-type ifilter() :: #ifilter{}.
-type igen() :: #igen{}.
+-type isimple() :: #isimple{}.
-type i() :: iapply() | ibinary() | icall() | icase() | icatch()
| iclause() | ifun() | iletrec() | imatch() | iprimop()
| iprotect() | ireceive1() | ireceive2() | iset() | itry()
- | ifilter() | igen().
+ | ifilter() | igen() | isimple().
-type warning() :: {file:filename(), [{integer(), module(), term()}]}.
@@ -287,13 +291,15 @@ gexpr({protect,Line,Arg}, Bools0, St0) ->
{#iprotect{anno=#a{anno=Anno},body=Eps++[E]},[],Bools0,St}
end;
gexpr({op,L,'andalso',E1,E2}, Bools, St0) ->
- {#c_var{name=V0},St} = new_var(L, St0),
+ Anno = lineno_anno(L, St0),
+ {#c_var{name=V0},St} = new_var(Anno, St0),
V = {var,L,V0},
False = {atom,L,false},
E = make_bool_switch_guard(L, E1, V, E2, False),
gexpr(E, Bools, St);
gexpr({op,L,'orelse',E1,E2}, Bools, St0) ->
- {#c_var{name=V0},St} = new_var(L, St0),
+ Anno = lineno_anno(L, St0),
+ {#c_var{name=V0},St} = new_var(Anno, St0),
V = {var,L,V0},
True = {atom,L,true},
E = make_bool_switch_guard(L, E1, V, True, E2),
@@ -382,33 +388,30 @@ gexpr_test(E0, Bools0, St0) ->
Lanno = Anno#a.anno,
{New,St2} = new_var(Lanno, St1),
Bools = [New|Bools0],
- {#icall{anno=Anno, %Must have an #a{}
- module=#c_literal{anno=Lanno,val=erlang},
- name=#c_literal{anno=Lanno,val='=:='},
- args=[New,#c_literal{anno=Lanno,val=true}]},
+ {icall_eq_true(New),
Eps0 ++ [#iset{anno=Anno,var=New,arg=E1}],Bools,St2}
end;
_ ->
- Anno = get_ianno(E1),
Lanno = get_lineno_anno(E1),
+ ACompGen = #a{anno=[compiler_generated]},
case is_simple(E1) of
true ->
Bools = [E1|Bools0],
- {#icall{anno=Anno, %Must have an #a{}
- module=#c_literal{anno=Lanno,val=erlang},
- name=#c_literal{anno=Lanno,val='=:='},
- args=[E1,#c_literal{anno=Lanno,val=true}]},Eps0,Bools,St1};
+ {icall_eq_true(E1),Eps0,Bools,St1};
false ->
{New,St2} = new_var(Lanno, St1),
Bools = [New|Bools0],
- {#icall{anno=Anno, %Must have an #a{}
- module=#c_literal{anno=Lanno,val=erlang},
- name=#c_literal{anno=Lanno,val='=:='},
- args=[New,#c_literal{anno=Lanno,val=true}]},
- Eps0 ++ [#iset{anno=Anno,var=New,arg=E1}],Bools,St2}
+ {icall_eq_true(New),
+ Eps0 ++ [#iset{anno=ACompGen,var=New,arg=E1}],Bools,St2}
end
end.
+icall_eq_true(Arg) ->
+ #icall{anno=#a{anno=[compiler_generated]},
+ module=#c_literal{val=erlang},
+ name=#c_literal{val='=:='},
+ args=[Arg,#c_literal{val=true}]}.
+
force_booleans(Vs0, E, Eps, St) ->
Vs1 = [set_anno(V, []) || V <- Vs0],
Vs = unforce(E, Eps, Vs1),
@@ -418,16 +421,15 @@ force_booleans_1([], E, Eps, St) ->
{E,Eps,St};
force_booleans_1([V|Vs], E0, Eps0, St0) ->
{E1,Eps1,St1} = force_safe(E0, St0),
- Lanno = element(2, V),
- Anno = #a{anno=Lanno},
- Call = #icall{anno=Anno,module=#c_literal{anno=Lanno,val=erlang},
- name=#c_literal{anno=Lanno,val=is_boolean},
+ ACompGen = #a{anno=[compiler_generated]},
+ Call = #icall{anno=ACompGen,module=#c_literal{val=erlang},
+ name=#c_literal{val=is_boolean},
args=[V]},
- {New,St} = new_var(Lanno, St1),
- Iset = #iset{anno=Anno,var=New,arg=Call},
+ {New,St} = new_var([], St1),
+ Iset = #iset{var=New,arg=Call},
Eps = Eps0 ++ Eps1 ++ [Iset],
- E = #icall{anno=Anno,
- module=#c_literal{anno=Lanno,val=erlang},name=#c_literal{anno=Lanno,val='and'},
+ E = #icall{anno=ACompGen,
+ module=#c_literal{val=erlang},name=#c_literal{val='and'},
args=[E1,New]},
force_booleans_1(Vs, E, Eps, St).
@@ -514,28 +516,28 @@ exprs([], St) -> {[],St}.
%% Generate an internal core expression.
expr({var,L,V}, St) -> {#c_var{anno=lineno_anno(L, St),name=V},[],St};
-expr({char,L,C}, St) -> {#c_literal{anno=lineno_anno(L, St),val=C},[],St};
-expr({integer,L,I}, St) -> {#c_literal{anno=lineno_anno(L, St),val=I},[],St};
-expr({float,L,F}, St) -> {#c_literal{anno=lineno_anno(L, St),val=F},[],St};
-expr({atom,L,A}, St) -> {#c_literal{anno=lineno_anno(L, St),val=A},[],St};
-expr({nil,L}, St) -> {#c_literal{anno=lineno_anno(L, St),val=[]},[],St};
-expr({string,L,S}, St) -> {#c_literal{anno=lineno_anno(L, St),val=S},[],St};
+expr({char,L,C}, St) -> {#c_literal{anno=full_anno(L, St),val=C},[],St};
+expr({integer,L,I}, St) -> {#c_literal{anno=full_anno(L, St),val=I},[],St};
+expr({float,L,F}, St) -> {#c_literal{anno=full_anno(L, St),val=F},[],St};
+expr({atom,L,A}, St) -> {#c_literal{anno=full_anno(L, St),val=A},[],St};
+expr({nil,L}, St) -> {#c_literal{anno=full_anno(L, St),val=[]},[],St};
+expr({string,L,S}, St) -> {#c_literal{anno=full_anno(L, St),val=S},[],St};
expr({cons,L,H0,T0}, St0) ->
{H1,Hps,St1} = safe(H0, St0),
{T1,Tps,St2} = safe(T0, St1),
- A = lineno_anno(L, St2),
+ A = full_anno(L, St2),
{annotate_cons(A, H1, T1, St2),Hps ++ Tps,St2};
expr({lc,L,E,Qs0}, St0) ->
{Qs1,St1} = preprocess_quals(L, Qs0, St0),
lc_tq(L, E, Qs1, #c_literal{anno=lineno_anno(L, St1),val=[]}, St1);
expr({bc,L,E,Qs}, St) ->
- bc_tq(L, E, Qs, {nil,L}, St);
+ bc_tq(L, E, Qs, St);
expr({tuple,L,Es0}, St0) ->
{Es1,Eps,St1} = safe_list(Es0, St0),
A = record_anno(L, St1),
{annotate_tuple(A, Es1, St1),Eps,St1};
expr({map,L,Es0}, St0) ->
- map_build_pair_chain(#c_literal{val=#{}},Es0,lineno_anno(L,St0),St0);
+ map_build_pairs(#c_literal{val=#{}}, Es0, full_anno(L, St0), St0);
expr({map,L,M0,Es0}, St0) ->
try expr_map(M0,Es0,lineno_anno(L, St0),St0) of
{_,_,_}=Res -> Res
@@ -550,7 +552,7 @@ expr({map,L,M0,Es0}, St0) ->
args=As},[],St}
end;
expr({bin,L,Es0}, St0) ->
- try expr_bin(Es0, lineno_anno(L, St0), St0) of
+ try expr_bin(Es0, full_anno(L, St0), St0) of
{_,_,_}=Res -> Res
catch
throw:bad_binary ->
@@ -640,11 +642,11 @@ expr({'catch',L,E0}, St0) ->
Lanno = lineno_anno(L, St1),
{#icatch{anno=#a{anno=Lanno},body=Eps ++ [E1]},[],St1};
expr({'fun',L,{function,F,A},{_,_,_}=Id}, St) ->
- Lanno = lineno_anno(L, St),
+ Lanno = full_anno(L, St),
{#c_var{anno=Lanno++[{id,Id}],name={F,A}},[],St};
expr({'fun',L,{function,M,F,A}}, St0) ->
{As,Aps,St1} = safe_list([M,F,A], St0),
- Lanno = lineno_anno(L, St1),
+ Lanno = full_anno(L, St1),
{#icall{anno=#a{anno=Lanno},
module=#c_literal{val=erlang},
name=#c_literal{val=make_fun},
@@ -655,13 +657,9 @@ expr({named_fun,L,'_',Cs,Id}, St) ->
fun_tq(Id, Cs, L, St, unnamed);
expr({named_fun,L,Name,Cs,Id}, St) ->
fun_tq(Id, Cs, L, St, {named,Name});
-expr({call,L,{remote,_,M,F},As0}, #core{wanted=Wanted}=St0) ->
+expr({call,L,{remote,_,M,F},As0}, St0) ->
{[M1,F1|As1],Aps,St1} = safe_list([M,F|As0], St0),
- Lanno = lineno_anno(L, St1),
- Anno = case Wanted of
- false -> [result_not_wanted|Lanno];
- true -> Lanno
- end,
+ Anno = full_anno(L, St1),
{#icall{anno=#a{anno=Anno},module=M1,name=F1,args=As1},Aps,St1};
expr({call,Lc,{atom,Lf,F},As0}, St0) ->
{As1,Aps,St1} = safe_list(As0, St0),
@@ -710,26 +708,28 @@ expr({op,_,'++',{lc,Llc,E,Qs0},More}, St0) ->
{Y,Yps,St} = lc_tq(Llc, E, Qs, Mc, St2),
{Y,Mps++Yps,St};
expr({op,L,'andalso',E1,E2}, St0) ->
- {#c_var{name=V0},St} = new_var(L, St0),
+ Anno = lineno_anno(L, St0),
+ {#c_var{name=V0},St} = new_var(Anno, St0),
V = {var,L,V0},
False = {atom,L,false},
E = make_bool_switch(L, E1, V, E2, False, St0),
expr(E, St);
expr({op,L,'orelse',E1,E2}, St0) ->
- {#c_var{name=V0},St} = new_var(L, St0),
+ Anno = lineno_anno(L, St0),
+ {#c_var{name=V0},St} = new_var(Anno, St0),
V = {var,L,V0},
True = {atom,L,true},
E = make_bool_switch(L, E1, V, True, E2, St0),
expr(E, St);
expr({op,L,Op,A0}, St0) ->
{A1,Aps,St1} = safe(A0, St0),
- LineAnno = lineno_anno(L, St1),
+ LineAnno = full_anno(L, St1),
{#icall{anno=#a{anno=LineAnno}, %Must have an #a{}
module=#c_literal{anno=LineAnno,val=erlang},
name=#c_literal{anno=LineAnno,val=Op},args=[A1]},Aps,St1};
expr({op,L,Op,L0,R0}, St0) ->
{As,Aps,St1} = safe_list([L0,R0], St0),
- LineAnno = lineno_anno(L, St1),
+ LineAnno = full_anno(L, St1),
{#icall{anno=#a{anno=LineAnno}, %Must have an #a{}
module=#c_literal{anno=LineAnno,val=erlang},
name=#c_literal{anno=LineAnno,val=Op},args=As},Aps,St1}.
@@ -758,86 +758,47 @@ make_bool_switch_guard(L, E, V, T, F) ->
{clause,NegL,[V],[],[V]}
]}.
-expr_map(M0,Es0,A,St0) ->
- {M1,Mps,St1} = safe(M0, St0),
+expr_map(M0, Es0, A, St0) ->
+ {M1,Eps0,St1} = safe(M0, St0),
case is_valid_map_src(M1) of
true ->
- case {M1,Es0} of
- {#c_var{}, []} ->
- %% transform M#{} to is_map(M)
- {Vpat,St2} = new_var(St1),
- {Fpat,St3} = new_var(St2),
- Cs = [#iclause{
- anno=A,
- pats=[Vpat],
- guard=[#icall{anno=#a{anno=A},
+ {M2,Eps1,St2} = map_build_pairs(M1, Es0, A, St1),
+ M3 = case Es0 of
+ [] -> M1;
+ [_|_] -> M2
+ end,
+ Cs = [#iclause{
+ anno=#a{anno=[compiler_generated|A]},
+ pats=[],
+ guard=[#icall{anno=#a{anno=A},
module=#c_literal{anno=A,val=erlang},
name=#c_literal{anno=A,val=is_map},
- args=[Vpat]}],
- body=[Vpat]}],
- Fc = fail_clause([Fpat], A, #c_literal{val=badarg}),
- {#icase{anno=#a{anno=A},args=[M1],clauses=Cs,fc=Fc},Mps,St3};
- {_,_} ->
- {M2,Eps,St2} = map_build_pair_chain(M1,Es0,A,St1),
- {M2,Mps++Eps,St2}
- end;
- false -> throw({bad_map,bad_map})
- end.
-
-%% Group continuous literal blocks and single variables, i.e.
-%% M0#{ a := 1, b := V1, K1 := V2, K2 := 42}
-%% becomes equivalent to
-%% M1 = M0#{ a := 1, b := V1 },
-%% M2 = M1#{ K1 := V1 },
-%% M3 = M2#{ K2 := 42 }
-
-map_build_pair_chain(M,Es,A,St) ->
- %% hack, remove iset if only literal
- case map_build_pair_chain(M,Es,A,St,[]) of
- {_,[#iset{arg=#c_literal{}=Val}],St1} -> {Val,[],St1};
- Normal -> Normal
- end.
-
-map_build_pair_chain(M0,[],_,St,Mps) ->
- {M0,Mps,St};
-map_build_pair_chain(M0,Es0,A,St0,Mps) ->
- % group continuous literal blocks
- % Anno = #a{anno=[compiler_generated]},
- % order is important, we need to reverse the literals
- case map_pair_block(Es0,[],[],St0) of
- {{CesL,EspL},{[],[]},Es1,St1} ->
- {MVar,St2} = new_var(St1),
- Pre = [#iset{var=MVar, arg=ann_c_map(A,M0,reverse(CesL))}],
- map_build_pair_chain(MVar,Es1,A,St2,Mps++EspL++Pre);
- {{[],[]},{CesV,EspV},Es1,St1} ->
- {MVar,St2} = new_var(St1),
- Pre = [#iset{var=MVar, arg=#c_map{arg=M0,es=CesV, anno=A}}],
- map_build_pair_chain(MVar,Es1,A,St2,Mps ++ EspV++Pre);
- {{CesL,EspL},{CesV,EspV},Es1,St1} ->
- {MVarL,St2} = new_var(St1),
- {MVarV,St3} = new_var(St2),
- Pre = [#iset{var=MVarL, arg=ann_c_map(A,M0,reverse(CesL))},
- #iset{var=MVarV, arg=#c_map{arg=MVarL,es=CesV,anno=A}}],
- map_build_pair_chain(MVarV,Es1,A,St3,Mps++EspL++EspV++Pre)
+ args=[M1]}],
+ body=[M3]}],
+ Fc = fail_clause([], [eval_failure|A], #c_literal{val=badarg}),
+ Eps = Eps0 ++ Eps1,
+ {#icase{anno=#a{anno=A},args=[],clauses=Cs,fc=Fc},Eps,St2};
+ false ->
+ throw({bad_map,bad_map})
end.
-map_pair_block([{Op,L,K0,V0}|Es],Ces,Esp,St0) ->
- {K,Ep0,St1} = safe(K0, St0),
- {V,Ep1,St2} = safe(V0, St1),
- A = lineno_anno(L, St2),
- Pair0 = map_op_to_c_map_pair(Op),
- Pair1 = Pair0#c_map_pair{anno=A,key=K,val=V},
- case cerl:is_literal(K) of
- true ->
- map_pair_block(Es,[Pair1|Ces],Ep0 ++ Ep1 ++ Esp,St2);
- false ->
- {{Ces,Esp},{[Pair1],Ep0++Ep1},Es,St2}
- end;
-map_pair_block([],Ces,Esp,St) ->
- {{Ces,Esp},{[],[]},[],St}.
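+%% map_build_pairs(Map, [{Op,Line,Key,Value}], Annotation, State) ->
+%%     {Core,[PreExpr],State}.
+%%  Build one map construction/update expression. Keys and values are
+%%  forced to be safe, and any pre-expressions needed to evaluate them
+%%  are returned separately.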
+map_build_pairs(Map, Es0, Ann, St0) ->
+ {Es,Pre,St1} = map_build_pairs_1(Es0, St0),
+ {ann_c_map(Ann, Map, Es),Pre,St1}.
+
+map_build_pairs_1([{Op0,L,K0,V0}|Es], St0) ->
+ {K,Pre0,St1} = safe(K0, St0),
+ {V,Pre1,St2} = safe(V0, St1),
+ {Pairs,Pre2,St3} = map_build_pairs_1(Es, St2),
+ As = lineno_anno(L, St3),
+ Op = map_op(Op0),
+ Pair = cerl:ann_c_map_pair(As, Op, K, V),
+ {[Pair|Pairs],Pre0++Pre1++Pre2,St3};
+map_build_pairs_1([], St) ->
+ {[],[],St}.
-map_op_to_c_map_pair(map_field_assoc) -> #c_map_pair{op=#c_literal{val=assoc}};
-map_op_to_c_map_pair(map_field_exact) -> #c_map_pair{op=#c_literal{val=exact}}.
+map_op(map_field_assoc) -> #c_literal{val=assoc};
+map_op(map_field_exact) -> #c_literal{val=exact}.
is_valid_map_src(#c_literal{val = M}) when is_map(M) -> true;
is_valid_map_src(#c_var{}) -> true;
@@ -1001,7 +962,7 @@ fun_tq({_,_,Name}=Id, Cs0, L, St0, NameInfo) ->
{Cs1,Ceps,St1} = clauses(Cs0, St0),
{Args,St2} = new_vars(Arity, St1),
{Ps,St3} = new_vars(Arity, St2), %Need new variables here
- Anno = lineno_anno(L, St3),
+ Anno = full_anno(L, St3),
Fc = function_clause(Ps, Anno, {Name,Arity}),
Fun = #ifun{anno=#a{anno=Anno},
id=[{id,Id}], %We KNOW!
@@ -1011,7 +972,8 @@ fun_tq({_,_,Name}=Id, Cs0, L, St0, NameInfo) ->
%% lc_tq(Line, Exp, [Qualifier], Mc, State) -> {LetRec,[PreExp],State}.
%% This TQ from Simon PJ pp 127-138.
-lc_tq(Line, E, [#igen{anno=GAnno,acc_pat=AccPat,acc_guard=AccGuard,
+lc_tq(Line, E, [#igen{anno=GAnno,ceps=Ceps,
+ acc_pat=AccPat,acc_guard=AccGuard,
skip_pat=SkipPat,tail=Tail,tail_pat=TailPat,
arg={Pre,Arg}}|Qs], Mc, St0) ->
{Name,St1} = new_fun_name("lc", St0),
@@ -1046,7 +1008,7 @@ lc_tq(Line, E, [#igen{anno=GAnno,acc_pat=AccPat,acc_guard=AccGuard,
Fun = #ifun{anno=LAnno,id=[],vars=[Var],clauses=Cs,fc=Fc},
{#iletrec{anno=LAnno#a{anno=[list_comprehension|LA]},defs=[{{Name,1},Fun}],
body=Pre ++ [#iapply{anno=LAnno,op=F,args=[Arg]}]},
- [],St4};
+ Ceps,St4};
lc_tq(Line, E, [#ifilter{}=Filter|Qs], Mc, St) ->
filter_tq(Line, E, Filter, Mc, St, Qs, fun lc_tq/5);
lc_tq(Line, E0, [], Mc0, St0) ->
@@ -1060,7 +1022,7 @@ lc_tq(Line, E0, [], Mc0, St0) ->
%% This TQ from Gustafsson ERLANG'05.
%% More could be transformed before calling bc_tq.
-bc_tq(Line, Exp, Qs0, _, St0) ->
+bc_tq(Line, Exp, Qs0, St0) ->
{BinVar,St1} = new_var(St0),
{Sz,SzPre,St2} = bc_initial_size(Exp, Qs0, St1),
{Qs,St3} = preprocess_quals(Line, Qs0, St2),
@@ -1071,7 +1033,8 @@ bc_tq(Line, Exp, Qs0, _, St0) ->
args=[Sz]}}] ++ BcPre,
{E,Pre,St}.
-bc_tq1(Line, E, [#igen{anno=GAnno,acc_pat=AccPat,acc_guard=AccGuard,
+bc_tq1(Line, E, [#igen{anno=GAnno,ceps=Ceps,
+ acc_pat=AccPat,acc_guard=AccGuard,
skip_pat=SkipPat,tail=Tail,tail_pat=TailPat,
arg={Pre,Arg}}|Qs], Mc, St0) ->
{Name,St1} = new_fun_name("lbc", St0),
@@ -1109,7 +1072,7 @@ bc_tq1(Line, E, [#igen{anno=GAnno,acc_pat=AccPat,acc_guard=AccGuard,
Fun = #ifun{anno=LAnno,id=[],vars=Vars,clauses=Cs,fc=Fc},
{#iletrec{anno=LAnno#a{anno=[list_comprehension|LA]},defs=[{{Name,2},Fun}],
body=Pre ++ [#iapply{anno=LAnno,op=F,args=[Arg,Mc]}]},
- [],St4};
+ Ceps,St4};
bc_tq1(Line, E, [#ifilter{}=Filter|Qs], Mc, St) ->
filter_tq(Line, E, Filter, Mc, St, Qs, fun bc_tq1/5);
bc_tq1(_, {bin,Bl,Elements}, [], AccVar, St0) ->
@@ -1245,8 +1208,9 @@ generator(Line, {generate,Lg,P0,E}, Gs, St0) ->
ann_c_cons(LA, Skip, Tail)}
end,
{Ce,Pre,St4} = safe(E, St3),
- Gen = #igen{anno=#a{anno=GA},acc_pat=AccPat,acc_guard=Cg,skip_pat=SkipPat,
- tail=Tail,tail_pat=#c_literal{anno=LA,val=[]},arg={Ceps++Pre,Ce}},
+ Gen = #igen{anno=#a{anno=GA},ceps=Ceps,
+ acc_pat=AccPat,acc_guard=Cg,skip_pat=SkipPat,
+ tail=Tail,tail_pat=#c_literal{anno=LA,val=[]},arg={Pre,Ce}},
{Gen,St4};
generator(Line, {b_generate,Lg,P,E}, Gs, St0) ->
LA = lineno_anno(Line, St0),
@@ -1515,6 +1479,7 @@ force_novars(#iapply{}=App, St) -> {App,[],St};
force_novars(#icall{}=Call, St) -> {Call,[],St};
force_novars(#ifun{}=Fun, St) -> {Fun,[],St}; %These are novars too
force_novars(#ibinary{}=Bin, St) -> {Bin,[],St};
+force_novars(#c_map{}=Bin, St) -> {Bin,[],St};
force_novars(Ce, St) ->
force_safe(Ce, St).
@@ -1634,7 +1599,7 @@ pattern({tuple,L,Ps}, St) ->
{annotate_tuple(record_anno(L, St), Ps1, St),Eps,St1};
pattern({map,L,Pairs}, St0) ->
{Ps,Eps,St1} = pattern_map_pairs(Pairs, St0),
- {#c_map{anno=lineno_anno(L, St1), es=Ps},Eps,St1};
+ {#c_map{anno=lineno_anno(L, St1),es=Ps,is_pat=true},Eps,St1};
pattern({bin,L,Ps}, St) ->
%% We don't create a #ibinary record here, since there is
%% no need to hold any used/new annotations in a pattern.
@@ -1656,49 +1621,30 @@ pattern_map_pairs(Ps, St) ->
{CMapPair,EpsP,Sti1} = pattern_map_pair(P,Sti0),
{CMapPair, {EpsM++EpsP,Sti1}}
end, {[],St}, Ps),
- {pat_alias_map_pairs(CMapPairs,[]),Eps,St1}.
-
-%% remove cluddering annotations
-pattern_map_clean_key(#c_literal{val=V}) -> {literal,V};
-pattern_map_clean_key(#c_var{name=V}) -> {var,V}.
-
-pat_alias_map_pairs(Ps1,Ps2) ->
- Ps = Ps1 ++ Ps2,
- F = fun(#c_map_pair{key=Ck,val=Cv},Dbi) ->
- K = pattern_map_clean_key(Ck),
- case dict:find(K,Dbi) of
- {ok,Cvs} -> dict:store(K,[Cv|Cvs],Dbi);
- _ -> dict:store(K,[Cv],Dbi)
- end
- end,
- Kdb = lists:foldl(F,dict:new(),Ps),
- pat_alias_map_pairs(Ps,Kdb,sets:new()).
-
-pat_alias_map_pairs([],_,_) -> [];
-pat_alias_map_pairs([#c_map_pair{key=Ck}=Pair|Pairs],Kdb,Set) ->
- K = pattern_map_clean_key(Ck),
- case sets:is_element(K,Set) of
- true ->
- pat_alias_map_pairs(Pairs,Kdb,Set);
- false ->
- Cvs = dict:fetch(K,Kdb),
- Cv = pat_alias_map_pair_values(Cvs),
- Set1 = sets:add_element(K,Set),
- [Pair#c_map_pair{val=Cv}|pat_alias_map_pairs(Pairs,Kdb,Set1)]
- end.
-
-pat_alias_map_pair_values([Cv]) -> Cv;
-pat_alias_map_pair_values([Cv1,Cv2|Cvs]) ->
- pat_alias_map_pair_values([pat_alias(Cv1,Cv2)|Cvs]).
+ {pat_alias_map_pairs(CMapPairs),Eps,St1}.
pattern_map_pair({map_field_exact,L,K,V}, St0) ->
- {Ck,EpsK,St1} = safe_pattern_expr(K,St0),
+ {Ck,EpsK,St1} = safe_pattern_expr(K, St0),
{Cv,EpsV,St2} = pattern(V, St1),
- {#c_map_pair{anno=lineno_anno(L,St2),
+ {#c_map_pair{anno=lineno_anno(L, St2),
op=#c_literal{val=exact},
key=Ck,
val=Cv},EpsK++EpsV,St2}.
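+
+%% pat_alias_map_pairs([#c_map_pair{}]) -> [#c_map_pair{}].
+%%  Group the pairs by key (ignoring annotations) and alias the values
+%%  of duplicated keys, so that a pattern such as #{K := A, K := B}
+%%  matches roughly like #{K := A = B} would.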
+pat_alias_map_pairs(Ps) ->
+ D = foldl(fun(#c_map_pair{key=K0}=Pair, D0) ->
+ K = cerl:set_ann(K0, []),
+ dict:append(K, Pair, D0)
+ end, dict:new(), Ps),
+ pat_alias_map_pairs_1(dict:to_list(D)).
+
+pat_alias_map_pairs_1([{_,[#c_map_pair{val=V0}=Pair|Vs]}|T]) ->
+ V = foldl(fun(#c_map_pair{val=V}, Pat) ->
+ pat_alias(V, Pat)
+ end, V0, Vs),
+ [Pair#c_map_pair{val=V}|pat_alias_map_pairs_1(T)];
+pat_alias_map_pairs_1([]) -> [].
+
%% pat_bin([BinElement], State) -> [BinSeg].
pat_bin(Ps, St) -> [pat_segment(P, St) || P <- Ps].
@@ -1740,7 +1686,7 @@ pat_alias(#c_tuple{anno=Anno,es=Es1}, #c_tuple{es=Es2}) ->
%% alias maps
%% There are no literals in maps patterns (patterns are always abstract)
pat_alias(#c_map{es=Es1}=M,#c_map{es=Es2}) ->
- M#c_map{es=pat_alias_map_pairs(Es1,Es2)};
+ M#c_map{es=pat_alias_map_pairs(Es1++Es2)};
pat_alias(#c_alias{var=V1,pat=P1},
#c_alias{var=V2,pat=P2}) ->
@@ -1794,7 +1740,7 @@ new_var_name(#core{vcount=C}=St) ->
new_var(St) ->
new_var([], St).
-new_var(Anno, St0) ->
+new_var(Anno, St0) when is_list(Anno) ->
{New,St} = new_var_name(St0),
{#c_var{anno=Anno,name=New},St}.
@@ -1852,7 +1798,7 @@ uclauses(Lcs, Ks, St0) ->
uclause(Cl0, Ks, St0) ->
{Cl1,_Pvs,Used,New,St1} = uclause(Cl0, Ks, Ks, St0),
- A0 = get_ianno(Cl1),
+ A0 = get_anno(Cl1),
A = A0#a{us=Used,ns=New},
{Cl1#iclause{anno=A},St1}.
@@ -2021,11 +1967,11 @@ uexpr(#ibinary{anno=A,segments=Ss}, _, St) ->
uexpr(#c_literal{}=Lit, _, St) ->
Anno = get_anno(Lit),
{set_anno(Lit, #a{us=[],anno=Anno}),St};
-uexpr(Lit, _, St) ->
- true = is_simple(Lit), %Sanity check!
- Vs = lit_vars(Lit),
- Anno = get_anno(Lit),
- {set_anno(Lit, #a{us=Vs,anno=Anno}),St}.
+uexpr(Simple, _, St) ->
+ true = is_simple(Simple), %Sanity check!
+ Vs = lit_vars(Simple),
+ Anno = get_anno(Simple),
+ {#isimple{anno=#a{us=Vs,anno=Anno},term=Simple},St}.
uexpr_list(Les0, Ks, St0) ->
mapfoldl(fun (Le, St) -> uexpr(Le, Ks, St) end, St0, Les0).
@@ -2039,7 +1985,7 @@ ufun_clauses(Lcs, Ks, St0) ->
ufun_clause(Cl0, Ks, St0) ->
{Cl1,Pvs,Used,_,St1} = uclause(Cl0, [], Ks, St0),
- A0 = get_ianno(Cl1),
+ A0 = get_anno(Cl1),
A = A0#a{us=subtract(intersection(Used, Ks), Pvs),ns=[]},
{Cl1#iclause{anno=A},St1}.
@@ -2202,7 +2148,8 @@ cguard(Gs, St0) ->
cexprs([#iset{var=#c_var{name=Name}=Var}=Iset], As, St) ->
%% Make return value explicit, and make Var true top level.
- cexprs([Iset,Var#c_var{anno=#a{us=[Name]}}], As, St);
+ Isimple = #isimple{anno=#a{us=[Name]},term=Var},
+ cexprs([Iset,Isimple], As, St);
cexprs([Le], As, St0) ->
{Ce,Es,Us,St1} = cexpr(Le, As, St0),
Exp = make_vars(As), %The export variables
@@ -2317,12 +2264,9 @@ cexpr(#c_literal{}=Lit, _As, St) ->
Anno = get_anno(Lit),
Vs = Anno#a.us,
{set_anno(Lit, Anno#a.anno),[],Vs,St};
-cexpr(Lit, _As, St) ->
- true = is_simple(Lit), %Sanity check!
- Anno = get_anno(Lit),
- Vs = Anno#a.us,
- %%Vs = lit_vars(Lit),
- {set_anno(Lit, Anno#a.anno),[],Vs,St}.
+cexpr(#isimple{anno=#a{us=Vs},term=Simple}, _As, St) ->
+ true = is_simple(Simple), %Sanity check!
+ {Simple,[],Vs,St}.
cfun(#ifun{anno=A,id=Id,vars=Args,clauses=Lcs,fc=Lfc}, _As, St0) ->
{Ccs,St1} = cclauses(Lcs, [], St0), %NEVER export!
@@ -2345,11 +2289,6 @@ lit_vars(#c_map_pair{key=K,val=V}, Vs) -> lit_vars(K, lit_vars(V, Vs));
lit_vars(#c_var{name=V}, Vs) -> add_element(V, Vs);
lit_vars(_, Vs) -> Vs. %These are atomic
-% lit_bin_vars(Segs, Vs) ->
-% foldl(fun (#c_bitstr{val=V,size=S}, Vs0) ->
-% lit_vars(V, lit_vars(S, Vs0))
-% end, Vs, Segs).
-
lit_list_vars(Ls) -> lit_list_vars(Ls, []).
lit_list_vars(Ls, Vs) ->
@@ -2368,16 +2307,21 @@ record_anno(L, St) when L >= ?REC_OFFSET ->
true ->
[record | lineno_anno(L - ?REC_OFFSET, St)];
false ->
- lineno_anno(L, St)
+ full_anno(L, St)
end;
record_anno(L, St) when L < -?REC_OFFSET ->
case member(dialyzer, St#core.opts) of
true ->
[record | lineno_anno(L + ?REC_OFFSET, St)];
false ->
- lineno_anno(L, St)
+ full_anno(L, St)
end;
record_anno(L, St) ->
+ full_anno(L, St).
+
+full_anno(L, #core{wanted=false}=St) ->
+ [result_not_wanted|lineno_anno(L, St)];
+full_anno(L, #core{wanted=true}=St) ->
lineno_anno(L, St).
lineno_anno(L, St) ->
@@ -2389,12 +2333,6 @@ lineno_anno(L, St) ->
[Line] ++ St#core.file
end.
-get_ianno(Ce) ->
- case get_anno(Ce) of
- #a{}=A -> A;
- A when is_list(A) -> #a{anno=A}
- end.
-
get_lineno_anno(Ce) ->
case get_anno(Ce) of
#a{anno=A} -> A;
diff --git a/lib/compiler/src/v3_kernel.erl b/lib/compiler/src/v3_kernel.erl
index 6504351c02..0ac1aaf158 100644
--- a/lib/compiler/src/v3_kernel.erl
+++ b/lib/compiler/src/v3_kernel.erl
@@ -131,12 +131,12 @@ module(#c_module{anno=A,name=M,exports=Es,attrs=As,defs=Fs}, _Options) ->
{ok,#k_mdef{anno=A,name=M#c_literal.val,exports=Kes,attributes=Kas,
body=Kfs ++ St#kern.funs},lists:sort(St#kern.ws)}.
-attributes([{#c_literal{val=Name},Val}|As]) ->
+attributes([{#c_literal{val=Name},#c_literal{val=Val}}|As]) ->
case include_attribute(Name) of
false ->
attributes(As);
true ->
- [{Name,core_lib:literal_value(Val)}|attributes(As)]
+ [{Name,Val}|attributes(As)]
end;
attributes([]) -> [].
@@ -273,17 +273,7 @@ expr(#c_tuple{anno=A,es=Ces}, Sub, St0) ->
{Kes,Ep,St1} = atomic_list(Ces, Sub, St0),
{#k_tuple{anno=A,es=Kes},Ep,St1};
expr(#c_map{anno=A,arg=Var,es=Ces}, Sub, St0) ->
- try expr_map(A,Var,Ces,Sub,St0) of
- {_,_,_}=Res -> Res
- catch
- throw:bad_map ->
- St1 = add_warning(get_line(A), bad_map, A, St0),
- Erl = #c_literal{val=erlang},
- Name = #c_literal{val=error},
- Args = [#c_literal{val=badarg}],
- Error = #c_call{anno=A,module=Erl,name=Name,args=Args},
- expr(Error, Sub, St1)
- end;
+ expr_map(A, Var, Ces, Sub, St0);
expr(#c_binary{anno=A,segments=Cv}, Sub, St0) ->
try atomic_bin(Cv, Sub, St0) of
{Kv,Ep,St1} ->
@@ -506,77 +496,81 @@ translate_fc(Args) ->
[#c_literal{val=function_clause},make_list(Args)].
expr_map(A,Var0,Ces,Sub,St0) ->
- %% An extra pass of validation of Map src because of inlining
{Var,Mps,St1} = expr(Var0, Sub, St0),
- case is_valid_map_src(Var) of
- true ->
- {Km,Eps,St2} = map_split_pairs(A, Var, Ces, Sub, St1),
- {Km,Eps++Mps,St2};
- false -> throw(bad_map)
- end.
-
-is_valid_map_src(#k_map{}) -> true;
-is_valid_map_src(#k_literal{val=M}) when is_map(M) -> true;
-is_valid_map_src(#k_var{}) -> true;
-is_valid_map_src(_) -> false.
+ {Km,Eps,St2} = map_split_pairs(A, Var, Ces, Sub, St1),
+ {Km,Eps++Mps,St2}.
map_split_pairs(A, Var, Ces, Sub, St0) ->
- %% two steps
- %% 1. force variables
- %% 2. remove multiples
- Pairs0 = [{Op,K,V} || #c_map_pair{op=#c_literal{val=Op},key=K,val=V} <- Ces],
+ %% 1. Force variables.
+ %% 2. Group adjacent pairs with literal keys.
+ %% 3. Within each such group, remove multiple assignments to the same key.
+ %% 4. Partition each group according to operator ('=>' and ':=').
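+ %%
+ %% For example, M#{1 => a, K := b, 2 => c} (where K is a variable) is
+ %% built as three updates: one for the literal key 1, one for the
+ %% variable key K, and one for the literal key 2, in that order.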
+ Pairs0 = [{Op,K,V} ||
+ #c_map_pair{op=#c_literal{val=Op},key=K,val=V} <- Ces],
{Pairs,Esp,St1} = foldr(fun
({Op,K0,V0}, {Ops,Espi,Sti0}) when Op =:= assoc; Op =:= exact ->
{K,Eps1,Sti1} = atomic(K0, Sub, Sti0),
{V,Eps2,Sti2} = atomic(V0, Sub, Sti1),
{[{Op,K,V}|Ops],Eps1 ++ Eps2 ++ Espi,Sti2}
end, {[],[],St0}, Pairs0),
-
- case map_group_pairs(Pairs) of
- {Assoc,[]} ->
- Kes = [#k_map_pair{key=K,val=V}||{_,{assoc,K,V}} <- Assoc],
- {#k_map{anno=A,op=assoc,var=Var,es=Kes},Esp,St1};
- {[],Exact} ->
- Kes = [#k_map_pair{key=K,val=V}||{_,{exact,K,V}} <- Exact],
- {#k_map{anno=A,op=exact,var=Var,es=Kes},Esp,St1};
- {Assoc,Exact} ->
- Kes1 = [#k_map_pair{key=K,val=V}||{_,{assoc,K,V}} <- Assoc],
- {Mvar,Em,St2} = force_atomic(#k_map{anno=A,op=assoc,var=Var,es=Kes1},St1),
- Kes2 = [#k_map_pair{key=K,val=V}||{_,{exact,K,V}} <- Exact],
- {#k_map{anno=A,op=exact,var=Mvar,es=Kes2},Esp ++ Em,St2}
-
+ map_split_pairs_1(A, Var, Pairs, Esp, St1).
+
+map_split_pairs_1(A, Map0, [{Op,Key,Val}|Pairs1]=Pairs0, Esp0, St0) ->
+ {Map1,Em,St1} = force_atomic(Map0, St0),
+ case Key of
+ #k_var{} ->
+ %% Don't combine variable keys with other keys.
+ Kes = [#k_map_pair{key=Key,val=Val}],
+ Map = #k_map{anno=A,op=Op,var=Map1,es=Kes},
+ map_split_pairs_1(A, Map, Pairs1, Esp0 ++ Em, St1);
+ _ ->
+ %% Literal key. Split off all literal keys.
+ {L,Pairs} = splitwith(fun({_,#k_var{},_}) -> false;
+ ({_,_,_}) -> true
+ end, Pairs0),
+ {Map,Esp,St2} = map_group_pairs(A, Map1, L, Esp0 ++ Em, St1),
+ map_split_pairs_1(A, Map, Pairs, Esp, St2)
+ end;
+map_split_pairs_1(_, Map, [], Esp, St0) ->
+ {Map,Esp,St0}.
+
+map_group_pairs(A, Var, Pairs0, Esp, St0) ->
+ Pairs = map_remove_dup_keys(Pairs0),
+ Assoc = [#k_map_pair{key=K,val=V} || {_,{assoc,K,V}} <- Pairs],
+ Exact = [#k_map_pair{key=K,val=V} || {_,{exact,K,V}} <- Pairs],
+ case {Assoc,Exact} of
+ {[_|_],[]} ->
+ {#k_map{anno=A,op=assoc,var=Var,es=Assoc},Esp,St0};
+ {[],[_|_]} ->
+ {#k_map{anno=A,op=exact,var=Var,es=Exact},Esp,St0};
+ {[_|_],[_|_]} ->
+ Map = #k_map{anno=A,op=assoc,var=Var,es=Assoc},
+ {Mvar,Em,St1} = force_atomic(Map, St0),
+ {#k_map{anno=A,op=exact,var=Mvar,es=Exact},Esp ++ Em,St1}
end.
-%% Group map by Assoc operations and Exact operations
+map_remove_dup_keys(Es) ->
+ dict:to_list(map_remove_dup_keys(Es, dict:new())).
-map_group_pairs(Es) ->
- Groups = dict:to_list(map_group_pairs(Es,dict:new())),
- partition(fun({_,{Op,_,_}}) -> Op =:= assoc end, Groups).
-
-map_group_pairs([{assoc,K,V}|Es0],Used0) ->
- Used1 = case map_key_is_used(K,Used0) of
- {ok, {assoc,_,_}} -> map_key_set_used(K,{assoc,K,V},Used0);
- {ok, {exact,_,_}} -> map_key_set_used(K,{exact,K,V},Used0);
- _ -> map_key_set_used(K,{assoc,K,V},Used0)
- end,
- map_group_pairs(Es0,Used1);
-map_group_pairs([{exact,K,V}|Es0],Used0) ->
- Used1 = case map_key_is_used(K,Used0) of
- {ok, {assoc,_,_}} -> map_key_set_used(K,{assoc,K,V},Used0);
- {ok, {exact,_,_}} -> map_key_set_used(K,{exact,K,V},Used0);
- _ -> map_key_set_used(K,{exact,K,V},Used0)
- end,
- map_group_pairs(Es0,Used1);
-map_group_pairs([],Used) ->
- Used.
-
-map_key_set_used(K,How,Used) ->
- dict:store(map_key_clean(K),How,Used).
-
-map_key_is_used(K,Used) ->
- dict:find(map_key_clean(K),Used).
+map_remove_dup_keys([{assoc,K0,V}|Es0],Used0) ->
+ K = map_key_clean(K0),
+ Op = case dict:find(K, Used0) of
+ {ok,{exact,_,_}} -> exact;
+ _ -> assoc
+ end,
+ Used1 = dict:store(K, {Op,K0,V}, Used0),
+ map_remove_dup_keys(Es0, Used1);
+map_remove_dup_keys([{exact,K0,V}|Es0],Used0) ->
+ K = map_key_clean(K0),
+ Op = case dict:find(K, Used0) of
+ {ok,{assoc,_,_}} -> assoc;
+ _ -> exact
+ end,
+ Used1 = dict:store(K, {Op,K0,V}, Used0),
+ map_remove_dup_keys(Es0, Used1);
+map_remove_dup_keys([], Used) -> Used.
-%% Be explicit instead of using set_kanno(K,[])
+%% Be explicit instead of using set_kanno(K, []).
map_key_clean(#k_var{name=V}) -> {var,V};
map_key_clean(#k_literal{val=V}) -> {lit,V};
map_key_clean(#k_int{val=V}) -> {lit,V};
@@ -661,12 +655,12 @@ atomic_bin([#c_bitstr{anno=A,val=E0,size=S0,unit=U0,type=T,flags=Fs0}|Es0],
{E,Ap1,St1} = atomic(E0, Sub, St0),
{S1,Ap2,St2} = atomic(S0, Sub, St1),
validate_bin_element_size(S1),
- U1 = core_lib:literal_value(U0),
- Fs1 = core_lib:literal_value(Fs0),
+ U1 = cerl:concrete(U0),
+ Fs1 = cerl:concrete(Fs0),
{Es,Ap3,St3} = atomic_bin(Es0, Sub, St2),
{#k_bin_seg{anno=A,size=S1,
unit=U1,
- type=core_lib:literal_value(T),
+ type=cerl:concrete(T),
flags=Fs1,
seg=E,next=Es},
Ap1++Ap2++Ap3,St3};
@@ -793,8 +787,8 @@ pattern_bin_1([#c_bitstr{anno=A,val=E0,size=S0,unit=U,type=T,flags=Fs}|Es0],
%% problems.
#k_atom{val=bad_size}
end,
- U0 = core_lib:literal_value(U),
- Fs0 = core_lib:literal_value(Fs),
+ U0 = cerl:concrete(U),
+ Fs0 = cerl:concrete(Fs),
%%ok= io:fwrite("~w: ~p~n", [?LINE,{B0,S,U0,Fs0}]),
{E,Osub1,St2} = pattern(E0, Isub0, Osub0, St1),
Isub1 = case E0 of
@@ -805,7 +799,7 @@ pattern_bin_1([#c_bitstr{anno=A,val=E0,size=S0,unit=U,type=T,flags=Fs}|Es0],
{Es,{Isub,Osub},St3} = pattern_bin_1(Es0, Isub1, Osub1, St2),
{#k_bin_seg{anno=A,size=S,
unit=U0,
- type=core_lib:literal_value(T),
+ type=cerl:concrete(T),
flags=Fs0,
seg=E,next=Es},
{Isub,Osub},St3};
@@ -2010,9 +2004,7 @@ format_error(nomatch_shadow) ->
format_error(bad_call) ->
"invalid module and/or function name; this call will always fail";
format_error(bad_segment_size) ->
- "binary construction will fail because of a type mismatch";
-format_error(bad_map) ->
- "map construction will fail because of a type mismatch".
+ "binary construction will fail because of a type mismatch".
add_warning(none, Term, Anno, #kern{ws=Ws}=St) ->
File = get_file(Anno),
diff --git a/lib/compiler/test/Makefile b/lib/compiler/test/Makefile
index 892a401c75..73d52a48bc 100644
--- a/lib/compiler/test/Makefile
+++ b/lib/compiler/test/Makefile
@@ -11,6 +11,7 @@ MODULES= \
beam_validator_SUITE \
beam_disasm_SUITE \
beam_except_SUITE \
+ beam_utils_SUITE \
bs_bincomp_SUITE \
bs_bit_binaries_SUITE \
bs_construct_SUITE \
@@ -40,6 +41,7 @@ NO_OPT= \
andor \
apply \
beam_except \
+ beam_utils \
bs_construct \
bs_match \
bs_utf \
@@ -59,6 +61,7 @@ NO_OPT= \
INLINE= \
andor \
apply \
+ beam_utils \
bs_bincomp \
bs_bit_binaries \
bs_construct \
diff --git a/lib/compiler/test/andor_SUITE.erl b/lib/compiler/test/andor_SUITE.erl
index b5408ecd8f..3199440d84 100644
--- a/lib/compiler/test/andor_SUITE.erl
+++ b/lib/compiler/test/andor_SUITE.erl
@@ -33,7 +33,7 @@ all() ->
[{group,p}].
groups() ->
- [{p,test_lib:parallel(),
+ [{p,[parallel],
[t_case,t_and_or,t_andalso,t_orelse,inside,overlap,
combined,in_case,before_and_inside_if]}].
@@ -173,7 +173,13 @@ t_and_or(Config) when is_list(Config) ->
true = (fun (X = true) when X or true or X -> true end)(True),
- ok.
+ Tuple = id({a,b}),
+ case Tuple of
+ {_,_} ->
+ {'EXIT',{badarg,_}} = (catch true and Tuple)
+ end,
+
+ ok.
t_andalso(Config) when is_list(Config) ->
Bs = [true,false],
diff --git a/lib/compiler/test/beam_utils_SUITE.erl b/lib/compiler/test/beam_utils_SUITE.erl
new file mode 100644
index 0000000000..d2e24cb5ae
--- /dev/null
+++ b/lib/compiler/test/beam_utils_SUITE.erl
@@ -0,0 +1,236 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(beam_utils_SUITE).
+
+-export([all/0,suite/0,groups/0,init_per_suite/1,end_per_suite/1,
+ init_per_group/2,end_per_group/2,
+ apply_fun/1,apply_mf/1,bs_init/1,bs_save/1,
+ is_not_killed/1,is_not_used_at/1,
+ select/1,y_catch/1]).
+-export([id/1]).
+
+suite() -> [{ct_hooks,[ts_install_cth]}].
+
+all() ->
+ test_lib:recompile(?MODULE),
+ [{group,p}].
+
+groups() ->
+ [{p,[parallel],
+ [apply_fun,
+ apply_mf,
+ bs_init,
+ bs_save,
+ is_not_killed,
+ is_not_used_at,
+ select,
+ y_catch
+ ]}].
+
+init_per_suite(Config) ->
+ Config.
+
+end_per_suite(_Config) ->
+ ok.
+
+init_per_group(_GroupName, Config) ->
+ Config.
+
+end_per_group(_GroupName, Config) ->
+ Config.
+
+apply_fun(_Config) ->
+ 3 = do_apply_fun(false, false),
+ 3 = do_apply_fun(false, true),
+ 3 = do_apply_fun(true, false),
+ 2 = do_apply_fun(true, true),
+ ok.
+
+do_apply_fun(X, Y) ->
+ F = fun(I) -> I+1 end,
+ Arg = case X andalso id(Y) of
+ true -> 1;
+ false -> 2
+ end,
+ F(Arg).
+
+apply_mf(_Config) ->
+ ok = do_apply_mf_used({a,b}, ?MODULE, id),
+ error = do_apply_mf_used([a], ?MODULE, id),
+ {'EXIT',{{case_clause,{[],b}},_}} = (catch do_apply_mf_used({[],b}, ?MODULE, id)),
+
+ error = do_apply_mf_killed({error,[a]}, ?MODULE, id),
+ ok = do_apply_mf_killed([b], ?MODULE, id),
+ {'EXIT',{{case_clause,{a,[b]}},_}} = (catch do_apply_mf_killed({a,[b]}, ?MODULE, id)),
+ {'EXIT',{{case_clause,{error,[]}},_}} = (catch do_apply_mf_killed({error,[]}, ?MODULE, id)),
+
+ ok.
+
+do_apply_mf_used(Arg, Mod, Func) ->
+ Res = case id(Arg) of
+ {Decoded,_} when Decoded =/= [] ->
+ ok;
+ List when is_list(List) ->
+ error
+ end,
+ Mod:Func(Res).
+
+do_apply_mf_killed(Arg, Mod, Func) ->
+ Res = case id(Arg) of
+ {Tag,Decoded} when Decoded =/= [], Tag =:= error ->
+ error;
+ List when is_list(List) ->
+ ok
+ end,
+ Mod:Func(Res).
+
+bs_init(_Config) ->
+ <<7>> = do_bs_init_1([?MODULE], 7),
+ error = do_bs_init_1([?MODULE], 0.0),
+ error = do_bs_init_1([?MODULE], -43),
+ error = do_bs_init_1([?MODULE], 42),
+
+ <<>> = do_bs_init_2([]),
+ <<0:32,((1 bsl 32)-1):32>> = do_bs_init_2([0,(1 bsl 32)-1]),
+ {'EXIT',{badarg,_}} = (catch do_bs_init_2([0.5])),
+ {'EXIT',{badarg,_}} = (catch do_bs_init_2([-1])),
+ {'EXIT',{badarg,_}} = (catch do_bs_init_2([1 bsl 32])),
+ ok.
+
+do_bs_init_1([?MODULE], Sz) ->
+ if
+ is_integer(Sz), Sz >= -42, Sz < 42 ->
+ id(<<Sz:8>>);
+ true ->
+ error
+ end.
+
+do_bs_init_2(SigNos) ->
+ << <<SigNo:32>> ||
+ SigNo <- SigNos,
+ (is_integer(SigNo) andalso SigNo >= 0 andalso SigNo < (1 bsl 32)) orelse
+ erlang:error(badarg)
+ >>.
+
+
+bs_save(_Config) ->
+ {a,30,<<>>} = do_bs_save(<<1:1,30:5>>),
+ {b,127,<<>>} = do_bs_save(<<1:1,31:5,0:1,127:7>>),
+ {c,127,<<>>} = do_bs_save(<<1:1,31:5,1:1,127:7>>),
+ {c,127,<<>>} = do_bs_save(<<0:1,31:5,1:1,127:7>>),
+ {d,1024,<<>>} = do_bs_save(<<0:1,31:5>>),
+ ok.
+
+do_bs_save(<<_:1, Tag:5, T/binary>>) when Tag < 31 ->
+ {a,Tag,T};
+do_bs_save(<<1:1, 31:5, 0:1, Tag:7, T/binary>>) ->
+ {b,Tag,T};
+do_bs_save(<<_:1, 31:5, 1:1, Tag:7, T/binary>>) ->
+ {c,Tag,T};
+do_bs_save(<<_:1, 31:5, T/binary>>) ->
+ {d,1024,T}.
+
+is_not_killed(_Config) ->
+ {Pid,Ref} = spawn_monitor(fun() -> exit(banan) end),
+ receive
+ {'DOWN', Ref, process, Pid, banan} ->
+ ok
+ end,
+ receive after 0 -> ok end.
+
+is_not_used_at(_Config) ->
+ {a,b} = do_is_not_used_at(a, [{a,b}]),
+ {a,b} = do_is_not_used_at(a, [x,{a,b}]),
+ {a,b} = do_is_not_used_at(a, [{x,y},{a,b}]),
+ none = do_is_not_used_at(z, [{a,b}]),
+ none = do_is_not_used_at(a, [x]),
+ none = do_is_not_used_at(a, [{x,y}]),
+ ok.
+
+do_is_not_used_at(Key, [P|Ps]) ->
+ if
+ tuple_size(P) >= 1, element(1, P) =:= Key ->
+ P;
+ true ->
+ do_is_not_used_at(Key, Ps)
+ end;
+do_is_not_used_at(_Key, []) -> none.
+
+-record(select, {fixed=false}).
+
+select(_Config) ->
+ a = do_select(#select{}, 0, 0),
+ b = do_select(#select{}, 0, 1),
+ c = do_select(#select{fixed=true}, 0, 0),
+ c = do_select(#select{fixed=true}, 0, 1),
+ ok.
+
+do_select(Head, OldSize, BSize) ->
+ Overwrite0 =
+ if
+ OldSize =:= BSize -> same;
+ true -> true
+ end,
+ Overwrite =
+ if
+ Head#select.fixed =/= false ->
+ false;
+ true ->
+ Overwrite0
+ end,
+ if
+ Overwrite =:= same ->
+ a;
+ Overwrite ->
+ b;
+ true ->
+ c
+ end.
+
+y_catch(_Config) ->
+ ok = try
+ do_y_catch(<<"<?xmlX">>, {state}),
+ failed
+ catch
+ throw:{<<"<?xmlX">>,{state}} ->
+ ok
+ end.
+
+do_y_catch(<<"<?xml",Rest0/binary>> = Bytes, State0) ->
+ {Rest1,State1} =
+ case do_y_catch_1(Rest0, State0) of
+ false ->
+ {Bytes,State0};
+ true ->
+ {_XmlAttributes, R, S} = do_y_catch_2(Rest0),
+ {R,S}
+ end,
+ case catch id({Rest1,State1}) of
+ Other ->
+ throw(Other)
+ end.
+
+do_y_catch_1(<<_,_/binary>>, _) ->
+ false.
+
+do_y_catch_2(_) -> {a,b,c}.
+
+
+%% The identity function.
+id(I) -> I.
diff --git a/lib/compiler/test/beam_validator_SUITE.erl b/lib/compiler/test/beam_validator_SUITE.erl
index 626f89ba7a..1b1c7db0e8 100644
--- a/lib/compiler/test/beam_validator_SUITE.erl
+++ b/lib/compiler/test/beam_validator_SUITE.erl
@@ -21,16 +21,17 @@
-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
init_per_group/2,end_per_group/2,
init_per_testcase/2,end_per_testcase/2,
- beam_files/1,compiler_bug/1,stupid_but_valid/1,
+ compiler_bug/1,stupid_but_valid/1,
xrange/1,yrange/1,stack/1,call_last/1,merge_undefined/1,
uninit/1,unsafe_catch/1,
- dead_code/1,mult_labels/1,
+ dead_code/1,
overwrite_catchtag/1,overwrite_trytag/1,accessing_tags/1,bad_catch_try/1,
cons_guard/1,
freg_range/1,freg_uninit/1,freg_state/1,
- bin_match/1,bad_bin_match/1,bin_aligned/1,bad_dsetel/1,
+ bad_bin_match/1,bin_aligned/1,bad_dsetel/1,
state_after_fault_in_catch/1,no_exception_in_catch/1,
- undef_label/1,illegal_instruction/1,failing_gc_guard_bif/1]).
+ undef_label/1,illegal_instruction/1,failing_gc_guard_bif/1,
+ map_field_lists/1]).
-include_lib("test_server/include/test_server.hrl").
@@ -47,18 +48,19 @@ suite() -> [{ct_hooks,[ts_install_cth]}].
all() ->
test_lib:recompile(?MODULE),
- [beam_files,{group,p}].
+ [{group,p}].
groups() ->
[{p,test_lib:parallel(),
[compiler_bug,stupid_but_valid,xrange,
yrange,stack,call_last,merge_undefined,uninit,
- unsafe_catch,dead_code,mult_labels,
+ unsafe_catch,dead_code,
overwrite_catchtag,overwrite_trytag,accessing_tags,
bad_catch_try,cons_guard,freg_range,freg_uninit,
- freg_state,bin_match,bad_bin_match,bin_aligned,bad_dsetel,
+ freg_state,bad_bin_match,bin_aligned,bad_dsetel,
state_after_fault_in_catch,no_exception_in_catch,
- undef_label,illegal_instruction,failing_gc_guard_bif]}].
+ undef_label,illegal_instruction,failing_gc_guard_bif,
+ map_field_lists]}].
init_per_suite(Config) ->
Config.
@@ -72,33 +74,19 @@ init_per_group(_GroupName, Config) ->
end_per_group(_GroupName, Config) ->
Config.
-
-beam_files(Config) when is_list(Config) ->
- ?line DataDir = proplists:get_value(data_dir, Config),
- ?line Wc = filename:join([DataDir,"..","..","*","*.beam"]),
- %% Must have at least two files here, or there will be
- %% a grammatical error in the output of the io:format/2 call below. ;-)
- ?line [_,_|_] = Fs = filelib:wildcard(Wc),
- ?line io:format("~p files\n", [length(Fs)]),
- test_lib:p_run(fun do_beam_file/1, Fs).
-
-
-do_beam_file(F) ->
- case beam_validator:file(F) of
- ok ->
- ok;
- {error,Es} ->
- io:format("File: ~s", [F]),
- io:format("Error: ~p\n", [Es]),
- error
- end.
-
compiler_bug(Config) when is_list(Config) ->
%% Check that the compiler returns an error if we try to
%% assemble one of the bad '.S' files.
- ?line Data = ?config(data_dir, Config),
- ?line File = filename:join(Data, "stack"),
- ?line error = compile:file(File, [asm,report_errors,binary,time]),
+ Data = ?config(data_dir, Config),
+ File = filename:join(Data, "compiler_bug"),
+ error = compile:file(File, [from_asm,report_errors,time]),
+
+ %% Make sure that the error was reported by
+ %% the beam_validator module.
+ {error,
+ [{"compiler_bug",
+ [{beam_validator,_}]}],
+ []} = compile:file(File, [from_asm,return_errors,time]),
ok.
%% The following code is stupid but it should compile.
@@ -134,7 +122,7 @@ yrange(Config) when is_list(Config) ->
{{move,{x,1},{y,-1}},5,
{invalid_store,{y,-1},term}}},
{{t,sum_2,2},
- {{bif,'+',{f,0},[{x,0},{y,1024}],{x,0}},8,
+ {{bif,'+',{f,0},[{x,0},{y,1024}],{x,0}},7,
{uninitialized_reg,{y,1024}}}},
{{t,sum_3,2},
{{move,{x,1},{y,1024}},5,limit}},
@@ -145,31 +133,31 @@ yrange(Config) when is_list(Config) ->
stack(Config) when is_list(Config) ->
Errors = do_val(stack, Config),
- ?line [{{t,a,2},{return,11,{stack_frame,2}}},
- {{t,b,2},{{deallocate,2},4,{allocated,none}}},
- {{t,c,2},{{deallocate,2},12,{allocated,none}}},
- {{t,d,2},
- {{allocate,2,2},5,{existing_stack_frame,{size,2}}}},
- {{t,e,2},{{deallocate,5},6,{allocated,2}}},
- {{t,bad_1,0},{{allocate_zero,2,10},4,{{x,9},not_live}}},
- {{t,bad_2,0},{{move,{y,0},{x,0}},5,{unassigned,{y,0}}}}] = Errors,
+ [{{t,a,2},{return,9,{stack_frame,2}}},
+ {{t,b,2},{{deallocate,2},4,{allocated,none}}},
+ {{t,bad_1,0},{{allocate_zero,2,10},4,{{x,9},not_live}}},
+ {{t,bad_2,0},{{move,{y,0},{x,0}},5,{unassigned,{y,0}}}},
+ {{t,c,2},{{deallocate,2},10,{allocated,none}}},
+ {{t,d,2},
+ {{allocate,2,2},5,{existing_stack_frame,{size,2}}}},
+ {{t,e,2},{{deallocate,5},6,{allocated,2}}}] = Errors,
ok.
call_last(Config) when is_list(Config) ->
Errors = do_val(call_last, Config),
- ?line [{{t,a,1},{{call_last,1,{f,8},2},11,{allocated,1}}},
- {{t,b,1},
- {{call_ext_last,2,{extfunc,lists,seq,2},2},
- 11,
- {allocated,1}}}] = Errors,
+ [{{t,a,1},{{call_last,1,{f,8},2},9,{allocated,1}}},
+ {{t,b,1},
+ {{call_ext_last,2,{extfunc,lists,seq,2},2},
+ 10,
+ {allocated,1}}}] = Errors,
ok.
merge_undefined(Config) when is_list(Config) ->
Errors = do_val(merge_undefined, Config),
- ?line [{{t,handle_call,2},
- {{call_ext,2,{extfunc,debug,filter,2}},
- 22,
- {uninitialized_reg,{y,0}}}}] = Errors,
+ [{{t,handle_call,2},
+ {{call_ext,2,{extfunc,debug,filter,2}},
+ 22,
+ {uninitialized_reg,{y,0}}}}] = Errors,
ok.
uninit(Config) when is_list(Config) ->
@@ -178,10 +166,10 @@ uninit(Config) when is_list(Config) ->
[{{t,sum_1,2},
{{move,{y,0},{x,0}},5,{uninitialized_reg,{y,0}}}},
{{t,sum_2,2},
- {{call,1,{f,10}},6,{uninitialized_reg,{y,0}}}},
+ {{call,1,{f,8}},5,{uninitialized_reg,{y,0}}}},
{{t,sum_3,2},
{{bif,'+',{f,0},[{x,0},{y,0}],{x,0}},
- 7,
+ 6,
{unassigned,{y,0}}}}] = Errors,
ok.
@@ -199,10 +187,6 @@ dead_code(Config) when is_list(Config) ->
[] = do_val(dead_code, Config),
ok.
-mult_labels(Config) when is_list(Config) ->
- [] = do_val(erl_prim_loader, Config, ".beam"),
- ok.
-
overwrite_catchtag(Config) when is_list(Config) ->
Errors = do_val(overwrite_catchtag, Config),
?line
@@ -214,16 +198,15 @@ overwrite_trytag(Config) when is_list(Config) ->
Errors = do_val(overwrite_trytag, Config),
?line
[{{overwrite_trytag,foo,1},
- {{kill,{y,2}},9,{trytag,_}}}] = Errors,
+ {{kill,{y,2}},8,{trytag,_}}}] = Errors,
ok.
accessing_tags(Config) when is_list(Config) ->
Errors = do_val(accessing_tags, Config),
- ?line
- [{{accessing_tags,foo,1},
- {{move,{y,0},{x,0}},6,{catchtag,_}}},
- {{accessing_tags,bar,1},
- {{move,{y,0},{x,0}},6,{trytag,_}}}] = Errors,
+ [{{accessing_tags,bar,1},
+ {{move,{y,0},{x,0}},6,{trytag,_}}},
+ {{accessing_tags,foo,1},
+ {{move,{y,0},{x,0}},6,{catchtag,_}}}] = Errors,
ok.
bad_catch_try(Config) when is_list(Config) ->
@@ -310,13 +293,6 @@ freg_state(Config) when is_list(Config) ->
{fclearerror,5,{bad_floating_point_state,cleared}}}] = Errors,
ok.
-bin_match(Config) when is_list(Config) ->
- Errors = do_val(bin_match, Config),
- ?line
- [{{t,t,1},{{bs_save,0},4,no_bs_match_state}},
- {{t,x,1},{{bs_restore,1},16,{no_save_point,1}}}] = Errors,
- ok.
-
bad_bin_match(Config) when is_list(Config) ->
[{{t,t,1},{return,5,{match_context,{x,0}}}}] =
do_val(bad_bin_match, Config),
@@ -340,36 +316,69 @@ bad_dsetel(Config) when is_list(Config) ->
?line
[{{t,t,1},
{{set_tuple_element,{x,1},{x,0},1},
- 15,
+ 17,
illegal_context_for_set_tuple_element}}] = Errors,
ok.
state_after_fault_in_catch(Config) when is_list(Config) ->
Errors = do_val(state_after_fault_in_catch, Config),
- [{{t,foo,1},
- {{move,{x,1},{x,0}},10,{uninitialized_reg,{x,1}}}},
- {{state_after_fault_in_catch,if_end,1},
+ [{{state_after_fault_in_catch,badmatch,1},
{{move,{x,1},{x,0}},9,{uninitialized_reg,{x,1}}}},
{{state_after_fault_in_catch,case_end,1},
{{move,{x,1},{x,0}},9,{uninitialized_reg,{x,1}}}},
- {{state_after_fault_in_catch,badmatch,1},
- {{move,{x,1},{x,0}},9,{uninitialized_reg,{x,1}}}}] = Errors,
+ {{state_after_fault_in_catch,if_end,1},
+ {{move,{x,1},{x,0}},9,{uninitialized_reg,{x,1}}}},
+ {{t,foo,1},
+ {{move,{x,1},{x,0}},10,{uninitialized_reg,{x,1}}}}] = Errors,
ok.
no_exception_in_catch(Config) when is_list(Config) ->
Errors = do_val(no_exception_in_catch, Config),
[{{no_exception_in_catch,nested_of_1,4},
- {{move,{x,3},{x,0}},91,{uninitialized_reg,{x,3}}}}] = Errors,
+ {{move,{x,3},{x,0}},88,{uninitialized_reg,{x,3}}}}] = Errors,
ok.
undef_label(Config) when is_list(Config) ->
- Errors = do_val(undef_label, Config),
+ M = {undef_label,
+ [{t,1}],
+ [],
+ [{function,t,1,2,
+ [{label,1},
+ {func_info,{atom,undef_label},{atom,t},1},
+ {label,2},
+ {test,is_eq_exact,{f,42},[{x,0},{atom,x}]},
+ {move,{atom,ok},{x,0}},
+ return]},
+ {function,x,1,17,
+ [{label,3},
+ {func_info,{atom,undef_label},{atom,x},1},
+ {label,4},
+ return]}],
+ 5},
+ Errors = beam_val(M),
[{{undef_label,t,1},{undef_labels,[42]}},
{{undef_label,x,1},{return,4,no_entry_label}}] = Errors,
ok.
illegal_instruction(Config) when is_list(Config) ->
- Errors = do_val(illegal_instruction, Config),
+ M = {illegal_instruction,
+ [{t,1},{x,1},{y,0}],
+ [],
+ [{function,t,1,2,
+ [{label,1},
+ {func_info,{atom,illegal_instruction},{atom,t},1},
+ {label,2},
+ {my_illegal_instruction,{x,0}},
+ return]},
+ {function,x,1,4,
+ [{label,3},
+ bad_func_info,
+ {label,4},
+ {my_illegal_instruction,{x,0}},
+ return]},
+ {function,y,0,17,[]}],
+ 5},
+ Errors = beam_val(M),
[{{illegal_instruction,t,1},
{{my_illegal_instruction,{x,0}},4,unknown_instruction}},
{{'_',x,1},{bad_func_info,1,illegal_instruction}},
@@ -407,19 +416,40 @@ process_request_foo(_) ->
process_request_bar(Pid, [Response]) when is_pid(Pid) ->
Response.
+map_field_lists(Config) ->
+ Errors = do_val(map_field_lists, Config),
+ [{{map_field_lists,x,1},
+ {{test,has_map_fields,{f,1},{x,0},
+ {list,[{atom,z},{atom,a}]}},
+ 5,
+ not_strict_order}},
+ {{map_field_lists,y,1},
+ {{test,has_map_fields,{f,3},{x,0},{list,[]}},
+ 5,
+ empty_field_list}}
+ ] = Errors.
%%%-------------------------------------------------------------------------
-do_val(Name, Config) ->
- do_val(Name, Config, ".S").
-
-do_val(Name, Config, Type) ->
- ?line Data = ?config(data_dir, Config),
- ?line File = filename:join(Data, atom_to_list(Name)++Type),
- ?line case beam_validator:file(File) of
- {error,Errors} ->
- ?line io:format("~p:~n~s",
- [File,beam_validator:format_error(Errors)]),
- Errors;
- ok -> []
- end.
+do_val(Mod, Config) ->
+ Data = ?config(data_dir, Config),
+ Base = atom_to_list(Mod),
+ File = filename:join(Data, Base),
+ case compile:file(File, [from_asm,no_postopt,return_errors]) of
+ {error,L,[]} ->
+ [{Base,Errors0}] = L,
+ Errors = [E || {beam_validator,E} <- Errors0],
+ _ = [io:put_chars(beam_validator:format_error(E)) ||
+ E <- Errors],
+ Errors;
+ {ok,Mod} ->
+ []
+ end.
+
+beam_val(M) ->
+ Name = atom_to_list(element(1, M)),
+ {error,[{Name,Errors0}]} = beam_validator:module(M, []),
+ Errors = [E || {beam_validator,E} <- Errors0],
+ _ = [io:put_chars(beam_validator:format_error(E)) ||
+ E <- Errors],
+ Errors.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/bad_dsetel.S b/lib/compiler/test/beam_validator_SUITE_data/bad_dsetel.S
index 279b2fa97f..9630d73a93 100644
--- a/lib/compiler/test/beam_validator_SUITE_data/bad_dsetel.S
+++ b/lib/compiler/test/beam_validator_SUITE_data/bad_dsetel.S
@@ -1,4 +1,4 @@
-{module, t}. %% version = 0
+{module, bad_dsetel}. %% version = 0
{exports, [{module_info,0},{module_info,1},{t,1}]}.
@@ -21,7 +21,9 @@
{move,{integer,3},{x,0}}.
{call_ext,3,{extfunc,erlang,setelement,3}}.
{test_heap,6,1}.
- {put_string,3,{string,"abc"},{x,1}}.
+ {put_list,{integer,99},nil,{x,1}}.
+ {put_list,{integer,98},{x,1},{x,1}}.
+ {put_list,{integer,97},{x,1},{x,1}}.
{set_tuple_element,{x,1},{x,0},1}.
{'%live',1}.
{deallocate,0}.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/bin_aligned.S b/lib/compiler/test/beam_validator_SUITE_data/bin_aligned.S
index 2f353fbd25..a59f7ccc03 100644
--- a/lib/compiler/test/beam_validator_SUITE_data/bin_aligned.S
+++ b/lib/compiler/test/beam_validator_SUITE_data/bin_aligned.S
@@ -1,4 +1,4 @@
-{module, t}. %% version = 0
+{module, bin_aligned}. %% version = 0
{exports, [{decode,1},{module_info,0},{module_info,1}]}.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/bin_match.S b/lib/compiler/test/beam_validator_SUITE_data/bin_match.S
deleted file mode 100644
index 96df0f7933..0000000000
--- a/lib/compiler/test/beam_validator_SUITE_data/bin_match.S
+++ /dev/null
@@ -1,64 +0,0 @@
-{module, bin_match}. %% version = 0
-
-{exports, [{t,1}]}.
-
-{attributes, []}.
-
-{labels, 8}.
-
-
-{function, t, 1, 2}.
- {label,1}.
- {func_info,{atom,t},{atom,t},1}.
- {label,2}.
-%% {test,bs_start_match,{f,1},[{x,0}]}.
- {bs_save,0}.
- {test,bs_get_integer,
- {f,3},
- [{integer,8},1,{field_flags,[aligned,unsigned,big]},{x,1}]}.
- {test,bs_get_integer,
- {f,3},
- [{integer,8},1,{field_flags,[aligned,unsigned,big]},{x,2}]}.
- {test,bs_test_tail,{f,3},[0]}.
- {test_heap,3,3}.
- {put_tuple,2,{x,0}}.
- {put,{x,1}}.
- {put,{x,2}}.
- {'%live',1}.
- return.
- {label,3}.
- {bs_restore,0}.
- {test,bs_get_integer,
- {f,1},
- [{integer,32},1,{field_flags,[aligned,unsigned,big]},{x,1}]}.
- {test,bs_test_tail,{f,1},[0]}.
- {move,{x,1},{x,0}}.
- return.
-
-{function, x, 1, 5}.
- {label,4}.
- {func_info,{atom,t},{atom,x},1}.
- {label,5}.
- {test,bs_start_match,{f,4},[{x,0}]}.
- {bs_save,0}.
- {test,bs_get_integer,
- {f,6},
- [{integer,8},1,{field_flags,[aligned,unsigned,big]},{x,1}]}.
- {test,bs_get_integer,
- {f,6},
- [{integer,8},1,{field_flags,[aligned,unsigned,big]},{x,2}]}.
- {test,bs_test_tail,{f,6},[0]}.
- {test_heap,3,3}.
- {put_tuple,2,{x,0}}.
- {put,{x,1}}.
- {put,{x,2}}.
- {'%live',1}.
- return.
- {label,6}.
- {bs_restore,1}.
- {test,bs_get_integer,
- {f,4},
- [{integer,32},1,{field_flags,[aligned,unsigned,big]},{x,1}]}.
- {test,bs_test_tail,{f,4},[0]}.
- {move,{x,1},{x,0}}.
- return.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/compiler_bug.S b/lib/compiler/test/beam_validator_SUITE_data/compiler_bug.S
new file mode 100644
index 0000000000..ba27bf5c47
--- /dev/null
+++ b/lib/compiler/test/beam_validator_SUITE_data/compiler_bug.S
@@ -0,0 +1,38 @@
+{module, compiler_bug}. %% version = 0
+
+{exports, [{module_info,0},{module_info,1},{sum,2}]}.
+
+{attributes, []}.
+
+{labels, 7}.
+
+
+{function, sum, 2, 2}.
+ {label,1}.
+ {line,[{location,"compiler_bug.erl",4}]}.
+ {func_info,{atom,compiler_bug},{atom,sum},2}.
+ {label,2}.
+ {line,[{location,"compiler_bug.erl",5}]}.
+ {gc_bif,'+',{f,0},2,[{y,0},{y,1}],{x,0}}.
+ return.
+
+
+{function, module_info, 0, 4}.
+ {label,3}.
+ {line,[]}.
+ {func_info,{atom,compiler_bug},{atom,module_info},0}.
+ {label,4}.
+ {move,{atom,compiler_bug},{x,0}}.
+ {line,[]}.
+ {call_ext_only,1,{extfunc,erlang,get_module_info,1}}.
+
+
+{function, module_info, 1, 6}.
+ {label,5}.
+ {line,[]}.
+ {func_info,{atom,compiler_bug},{atom,module_info},1}.
+ {label,6}.
+ {move,{x,0},{x,1}}.
+ {move,{atom,compiler_bug},{x,0}}.
+ {line,[]}.
+ {call_ext_only,2,{extfunc,erlang,get_module_info,2}}.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/dead_code.S b/lib/compiler/test/beam_validator_SUITE_data/dead_code.S
index f964f98fba..c114664ba0 100644
--- a/lib/compiler/test/beam_validator_SUITE_data/dead_code.S
+++ b/lib/compiler/test/beam_validator_SUITE_data/dead_code.S
@@ -1,10 +1,10 @@
{module, dead_code}. %% version = 0
-{exports, [{execute,0},{module_info,0},{module_info,1}]}.
+{exports, [{execute,0}]}.
{attributes, []}.
-{labels, 10}.
+{labels, 6}.
{function, execute, 0, 2}.
@@ -12,7 +12,6 @@
{func_info,{atom,dead_code},{atom,execute},0}.
{label,2}.
{allocate,0,0}.
- {'%live',0}.
{call_ext,0,{extfunc,foo,fie,0}}.
{test,is_ne,{f,4},[{x,0},{integer,0}]}.
{test,is_ne,{f,4},[{x,0},{integer,1}]}.
@@ -22,27 +21,7 @@
{case_end,{x,0}}.
{label,4}.
{move,{atom,ok},{x,0}}.
- {'%live',1}.
{deallocate,0}.
return.
- {'%','Moved code'}.
{label,5}.
{case_end,{x,0}}.
-
-
-{function, module_info, 0, 7}.
- {label,6}.
- {func_info,{atom,dead_code},{atom,module_info},0}.
- {label,7}.
- {move,nil,{x,0}}.
- {'%live',1}.
- return.
-
-
-{function, module_info, 1, 9}.
- {label,8}.
- {func_info,{atom,dead_code},{atom,module_info},1}.
- {label,9}.
- {move,nil,{x,0}}.
- {'%live',1}.
- return.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/erl_prim_loader.beam b/lib/compiler/test/beam_validator_SUITE_data/erl_prim_loader.beam
deleted file mode 100644
index dd58a88e42..0000000000
--- a/lib/compiler/test/beam_validator_SUITE_data/erl_prim_loader.beam
+++ /dev/null
Binary files differ
diff --git a/lib/compiler/test/beam_validator_SUITE_data/freg_range.S b/lib/compiler/test/beam_validator_SUITE_data/freg_range.S
index ee583a923e..b3ebff3ade 100644
--- a/lib/compiler/test/beam_validator_SUITE_data/freg_range.S
+++ b/lib/compiler/test/beam_validator_SUITE_data/freg_range.S
@@ -1,10 +1,10 @@
{module, freg_range}. %% version = 0
-{exports, [{module_info,0},{module_info,1},{prod,2},{sum,2},{sum_prod,3}]}.
+{exports, [{sum_1,2},{sum_2,2},{sum_3,2},{sum_4,2}]}.
{attributes, []}.
-{labels, 8}.
+{labels, 9}.
{function, sum_1, 2, 2}.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/freg_state.S b/lib/compiler/test/beam_validator_SUITE_data/freg_state.S
index ff4d7548ae..7466763482 100644
--- a/lib/compiler/test/beam_validator_SUITE_data/freg_state.S
+++ b/lib/compiler/test/beam_validator_SUITE_data/freg_state.S
@@ -1,6 +1,6 @@
{module, freg_state}. %% version = 0
-{exports, []}.
+{exports, [{sum_1,2},{sum_2,2},{sum_3,2},{sum_4,2},{sum_5,2}]}.
{attributes, []}.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/freg_uninit.S b/lib/compiler/test/beam_validator_SUITE_data/freg_uninit.S
index f8d805d9ec..71e833446a 100644
--- a/lib/compiler/test/beam_validator_SUITE_data/freg_uninit.S
+++ b/lib/compiler/test/beam_validator_SUITE_data/freg_uninit.S
@@ -1,10 +1,10 @@
{module, freg_uninit}. %% version = 0
-{exports, []}.
+{exports, [{sum_1,2},{sum_2,2}]}.
{attributes, []}.
-{labels, 8}.
+{labels, 7}.
{function, sum_1, 2, 2}.
@@ -14,7 +14,6 @@
{fconv,{x,0},{fr,0}}.
fclearerror.
{bif,fadd,{f,0},[{fr,0},{fr,1}],{fr,0}}.
- {'%live',1}.
return.
@@ -26,7 +25,12 @@
{fconv,{x,1},{fr,1}}.
fclearerror.
{fcheckerror,{f,0}}.
- {call,2,{f,8}}.
+ {call,2,{f,6}}.
{bif,fadd,{f,0},[{fr,0},{fr,1}],{fr,0}}.
- {'%live',1}.
+ return.
+
+{function, foo, 2, 6}.
+ {label,5}.
+ {func_info,{atom,t},{atom,foo},2}.
+ {label,6}.
return.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/illegal_instruction.S b/lib/compiler/test/beam_validator_SUITE_data/illegal_instruction.S
deleted file mode 100644
index d6e92abc71..0000000000
--- a/lib/compiler/test/beam_validator_SUITE_data/illegal_instruction.S
+++ /dev/null
@@ -1,26 +0,0 @@
-{module, illegal_instruction}. %% version = 0
-
-{exports, []}.
-
-{attributes, []}.
-
-{labels, 7}.
-
-
-{function, t, 1, 2}.
- {label,1}.
- {func_info,{atom,illegal_instruction},{atom,t},1}.
- {label,2}.
- {my_illegal_instruction,{x,0}}.
- return.
-
-
-{function, x, 1, 4}.
- {label,3}.
- bad_func_info.
- {label,4}.
- {my_illegal_instruction,{x,0}}.
- return.
-
-{function, y, 0, 17}.
- \ No newline at end of file
diff --git a/lib/compiler/test/beam_validator_SUITE_data/map_field_lists.S b/lib/compiler/test/beam_validator_SUITE_data/map_field_lists.S
new file mode 100644
index 0000000000..9af68c82d4
--- /dev/null
+++ b/lib/compiler/test/beam_validator_SUITE_data/map_field_lists.S
@@ -0,0 +1,29 @@
+{module, map_field_lists}. %% version = 0
+
+{exports, [{x,1},{y,1}]}.
+
+{attributes, []}.
+
+{labels, 5}.
+
+
+{function, x, 1, 2}.
+ {label,1}.
+ {line,[{location,"map_field_lists.erl",4}]}.
+ {func_info,{atom,map_field_lists},{atom,x},1}.
+ {label,2}.
+ {test,is_map,{f,1},[{x,0}]}.
+ {test,has_map_fields,{f,1},{x,0},{list,[{atom,z},{atom,a}]}}.
+ {move,{atom,ok},{x,0}}.
+ return.
+
+
+{function, y, 1, 4}.
+ {label,3}.
+ {line,[{location,"map_field_lists.erl",7}]}.
+ {func_info,{atom,map_field_lists},{atom,y},1}.
+ {label,4}.
+ {test,is_map,{f,3},[{x,0}]}.
+ {test,has_map_fields,{f,3},{x,0},{list,[]}}.
+ {move,{atom,ok},{x,0}}.
+ return.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/merge_undefined.S b/lib/compiler/test/beam_validator_SUITE_data/merge_undefined.S
index 3d76127824..481d55045d 100644
--- a/lib/compiler/test/beam_validator_SUITE_data/merge_undefined.S
+++ b/lib/compiler/test/beam_validator_SUITE_data/merge_undefined.S
@@ -22,7 +22,8 @@
{label,4}.
{allocate_heap,1,6,2}.
{move,{x,1},{y,0}}.
- {put_string,2,{string,"~p"},{x,0}}.
+ {put_list,{integer,112},nil,{x,0}}.
+ {put_list,{integer,126},{x,0},{x,0}}.
{put_list,{y,0},nil,{x,1}}.
{'%live',2}.
{call_ext,2,{extfunc,io,format,2}}.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/no_exception_in_catch.S b/lib/compiler/test/beam_validator_SUITE_data/no_exception_in_catch.S
index e08a718a39..1a5b417a5f 100644
--- a/lib/compiler/test/beam_validator_SUITE_data/no_exception_in_catch.S
+++ b/lib/compiler/test/beam_validator_SUITE_data/no_exception_in_catch.S
@@ -26,7 +26,7 @@
{call_ext,1,{extfunc,erlang,erase,1}}.
{move,{atom,nested},{x,0}}.
{call_ext,1,{extfunc,erlang,erase,1}}.
- {bif,self,nofail,[],{x,0}}.
+ {bif,self,{f,0},[],{x,0}}.
{'try',{y,8},{f,13}}.
{'try',{y,7},{f,11}}.
{'try',{y,6},{f,9}}.
@@ -34,7 +34,7 @@
%% Because the following instructions can't possibly throw an exception,
%% label 7 used to get no state. Now the try_end itself will save the state.
{move,{x,0},{y,4}}.
- {bif,self,nofail,[],{x,0}}.
+ {bif,self,{f,0},[],{x,0}}.
{'%live',1}.
{try_end,{y,5}}.
{test,is_eq_exact,{f,15},[{x,0},{y,4}]}.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/stack.S b/lib/compiler/test/beam_validator_SUITE_data/stack.S
index 244c22a2f9..e4356a9d00 100644
--- a/lib/compiler/test/beam_validator_SUITE_data/stack.S
+++ b/lib/compiler/test/beam_validator_SUITE_data/stack.S
@@ -1,10 +1,10 @@
{module, stack}. %% version = 0
-{exports, [{a,2},{b,2},{c,2},{d,2},{e,2}]}.
+{exports, [{a,2},{b,2},{c,2},{d,2},{e,2},{bad_1,0},{bad_2,0},{foo,0}]}.
{attributes, []}.
-{labels, 21}.
+{labels, 17}.
{function, a, 2, 2}.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/undef_label.S b/lib/compiler/test/beam_validator_SUITE_data/undef_label.S
deleted file mode 100644
index dd29066bf4..0000000000
--- a/lib/compiler/test/beam_validator_SUITE_data/undef_label.S
+++ /dev/null
@@ -1,22 +0,0 @@
-{module, undef_label}. %% version = 0
-
-{exports, []}.
-
-{attributes, []}.
-
-{labels, 7}.
-
-
-{function, t, 1, 2}.
- {label,1}.
- {func_info,{atom,undef_label},{atom,t},1}.
- {label,2}.
- {test,is_eq_exact,{f,42},[{x,0},{atom,x}]}.
- {move,{atom,ok},{x,0}}.
- return.
-
-{function, x, 1, 17}.
- {label,3}.
- {func_info,{atom,undef_label},{atom,x},1}.
- {label,4}.
- return.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/uninit.S b/lib/compiler/test/beam_validator_SUITE_data/uninit.S
index 1a45c31411..9a66f4f7d6 100644
--- a/lib/compiler/test/beam_validator_SUITE_data/uninit.S
+++ b/lib/compiler/test/beam_validator_SUITE_data/uninit.S
@@ -1,9 +1,11 @@
{module, uninit}. %% version = 0
-{exports, []}.
+{exports, [{sum_1,2},{sum_2,2},{sum_3,2}]}.
{attributes, []}.
+{labels, 9}.
+
{function, sum_1, 2, 2}.
{label,1}.
{func_info,{atom,t},{atom,sum_1},2}.
@@ -11,7 +13,7 @@
{allocate,1,2}.
{move,{y,0},{x,0}}.
{'%live',1}.
- {call,1,{f,10}}.
+ {call,1,{f,8}}.
{bif,'+',{f,0},[{x,0},{y,0}],{x,0}}.
{'%live',1}.
{deallocate,1}.
@@ -23,7 +25,7 @@
{label,4}.
{allocate,1,2}.
{'%live',1}.
- {call,1,{f,10}}.
+ {call,1,{f,8}}.
{bif,'+',{f,0},[{x,0},{y,0}],{x,0}}.
{'%live',1}.
{deallocate,1}.
@@ -35,14 +37,14 @@
{label,6}.
{allocate_zero,1,2}.
{'%live',1}.
- {call,1,{f,10}}.
+ {call,1,{f,8}}.
{bif,'+',{f,0},[{x,0},{y,0}],{x,0}}.
{'%live',1}.
{deallocate,1}.
return.
-{function, id, 1, 10}.
- {label,9}.
+{function, id, 1, 8}.
+ {label,7}.
{func_info,{atom,t},{atom,id},1}.
- {label,10}.
+ {label,8}.
return.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/xrange.S b/lib/compiler/test/beam_validator_SUITE_data/xrange.S
index 3abbdffbc2..c6f20288f7 100644
--- a/lib/compiler/test/beam_validator_SUITE_data/xrange.S
+++ b/lib/compiler/test/beam_validator_SUITE_data/xrange.S
@@ -1,10 +1,10 @@
{module, xrange}. %% version = 0
-{exports, [{module_info,0},{module_info,1},{prod,2},{sum,2},{sum_prod,3}]}.
+{exports, [{sum_1,2},{sum_2,2},{sum_3,2},{sum_4,2}]}.
{attributes, []}.
-{labels, 8}.
+{labels, 9}.
{function, sum_1, 2, 2}.
diff --git a/lib/compiler/test/bs_bit_binaries_SUITE.erl b/lib/compiler/test/bs_bit_binaries_SUITE.erl
index 8609a490f5..2433e7621e 100644
--- a/lib/compiler/test/bs_bit_binaries_SUITE.erl
+++ b/lib/compiler/test/bs_bit_binaries_SUITE.erl
@@ -37,7 +37,7 @@ all() ->
[{group,p}].
groups() ->
- [{p,test_lib:parallel(),
+ [{p,[parallel],
[misc,horrid_match,test_bitstr,test_bit_size,
asymmetric_tests,big_asymmetric_tests,
binary_to_and_from_list,big_binary_to_and_from_list,
diff --git a/lib/compiler/test/bs_construct_SUITE.erl b/lib/compiler/test/bs_construct_SUITE.erl
index ce39de2a82..9df874c387 100644
--- a/lib/compiler/test/bs_construct_SUITE.erl
+++ b/lib/compiler/test/bs_construct_SUITE.erl
@@ -39,7 +39,7 @@ all() ->
[{group,p}].
groups() ->
- [{p,test_lib:parallel(),
+ [{p,[parallel],
[two,test1,fail,float_bin,in_guard,in_catch,
nasty_literals,side_effect,opt,otp_7556,float_arith,
otp_8054]}].
diff --git a/lib/compiler/test/bs_match_SUITE.erl b/lib/compiler/test/bs_match_SUITE.erl
index 149b9bbb8f..f7af56afcc 100644
--- a/lib/compiler/test/bs_match_SUITE.erl
+++ b/lib/compiler/test/bs_match_SUITE.erl
@@ -34,7 +34,7 @@
otp_7188/1,otp_7233/1,otp_7240/1,otp_7498/1,
match_string/1,zero_width/1,bad_size/1,haystack/1,
cover_beam_bool/1,matched_out_size/1,follow_fail_branch/1,
- no_partition/1,calling_a_binary/1]).
+ no_partition/1,calling_a_binary/1,binary_in_map/1]).
-export([coverage_id/1,coverage_external_ignore/2]).
@@ -48,7 +48,7 @@ all() ->
[{group,p}].
groups() ->
- [{p,test_lib:parallel(),
+ [{p,[parallel],
[fun_shadow,int_float,otp_5269,null_fields,wiger,
bin_tail,save_restore,shadowed_size_var,
partitioned_bs_match,function_clause,unit,
@@ -59,7 +59,7 @@ groups() ->
matching_and_andalso,otp_7188,otp_7233,otp_7240,
otp_7498,match_string,zero_width,bad_size,haystack,
cover_beam_bool,matched_out_size,follow_fail_branch,
- no_partition,calling_a_binary]}].
+ no_partition,calling_a_binary,binary_in_map]}].
init_per_suite(Config) ->
@@ -368,11 +368,20 @@ partitioned_bs_match_3(Var, <<_>>) -> Var;
partitioned_bs_match_3(1, 2) -> ok.
function_clause(Config) when is_list(Config) ->
- ?line ok = function_clause_1(<<0,7,0,7,42>>),
- ?line fc(function_clause_1, [<<0,1,2,3>>],
- catch function_clause_1(<<0,1,2,3>>)),
- ?line fc(function_clause_1, [<<0,1,2,3>>],
- catch function_clause_1(<<0,7,0,1,2,3>>)),
+ ok = function_clause_1(<<0,7,0,7,42>>),
+ fc(function_clause_1, [<<0,1,2,3>>],
+ catch function_clause_1(<<0,1,2,3>>)),
+ fc(function_clause_1, [<<0,1,2,3>>],
+ catch function_clause_1(<<0,7,0,1,2,3>>)),
+
+ ok = function_clause_2(<<0,7,0,7,42>>),
+ ok = function_clause_2(<<255>>),
+ ok = function_clause_2(<<13:4>>),
+ fc(function_clause_2, [<<0,1,2,3>>],
+ catch function_clause_2(<<0,1,2,3>>)),
+ fc(function_clause_2, [<<0,1,2,3>>],
+ catch function_clause_2(<<0,7,0,1,2,3>>)),
+
ok.
function_clause_1(<<0:8,7:8,T/binary>>) ->
@@ -380,6 +389,13 @@ function_clause_1(<<0:8,7:8,T/binary>>) ->
function_clause_1(<<_:8>>) ->
ok.
+function_clause_2(<<0:8,7:8,T/binary>>) ->
+ function_clause_2(T);
+function_clause_2(<<_:8>>) ->
+ ok;
+function_clause_2(<<_:4>>) ->
+ ok.
+
unit(Config) when is_list(Config) ->
?line 42 = peek1(<<42>>),
?line 43 = peek1(<<43,1,2>>),
@@ -1189,6 +1205,26 @@ call_binary(<<>>, Acc) ->
call_binary(<<H,T/bits>>, Acc) ->
T(<<Acc/binary,H>>).
+binary_in_map(Config) when is_list(Config) ->
+ ok = match_binary_in_map(#{key => <<42:8>>}),
+ {'EXIT',{{badmatch,#{key := 1}},_}} =
+ (catch match_binary_in_map(#{key => 1})),
+ {'EXIT',{{badmatch,#{key := <<1023:16>>}},_}} =
+ (catch match_binary_in_map(#{key => <<1023:16>>})),
+ {'EXIT',{{badmatch,#{key := <<1:8>>}},_}} =
+ (catch match_binary_in_map(#{key => <<1:8>>})),
+ {'EXIT',{{badmatch,not_a_map},_}} =
+ (catch match_binary_in_map(not_a_map)),
+ ok.
+
+match_binary_in_map(Map) ->
+ case 8 of
+ N ->
+ #{key := <<42:N>>} = Map,
+ ok
+ end.
+
+
check(F, R) ->
R = F().
diff --git a/lib/compiler/test/compilation_SUITE.erl b/lib/compiler/test/compilation_SUITE.erl
index 8711f35e8e..296774e083 100644
--- a/lib/compiler/test/compilation_SUITE.erl
+++ b/lib/compiler/test/compilation_SUITE.erl
@@ -611,12 +611,10 @@ otp_7345(Config) when is_list(Config) ->
otp_7345(ObjRef, _RdEnv, Args) ->
Cid = ObjRef#contextId.cid,
- _DpRef =
- #dpRef{cid = Cid,
+ _ = #dpRef{cid = Cid,
ms_device_context_id = cid_id,
tlli = #ptmsi{value = 0}},
- _QosProfile =
- #qosProfileBssgp{peak_bit_rate_msb = 0,
+ _ = #qosProfileBssgp{peak_bit_rate_msb = 0,
peak_bit_rate_lsb = 80,
t_a_precedence = 49},
[Cpdu|_] = Args,
diff --git a/lib/compiler/test/compile_SUITE.erl b/lib/compiler/test/compile_SUITE.erl
index 128291dc67..1c96abe017 100644
--- a/lib/compiler/test/compile_SUITE.erl
+++ b/lib/compiler/test/compile_SUITE.erl
@@ -30,7 +30,7 @@
other_output/1, encrypted_abstr/1,
bad_record_use1/1, bad_record_use2/1, strict_record/1,
missing_testheap/1, cover/1, env/1, core/1, asm/1,
- sys_pre_attributes/1]).
+ sys_pre_attributes/1, dialyzer/1]).
-export([init/3]).
@@ -47,7 +47,7 @@ all() ->
other_output, encrypted_abstr,
{group, bad_record_use}, strict_record,
missing_testheap, cover, env, core, asm,
- sys_pre_attributes].
+ sys_pre_attributes, dialyzer].
groups() ->
[{bad_record_use, [],
@@ -748,42 +748,65 @@ env_1(Simple, Target) ->
%% compile the generated Core Erlang files.
core(Config) when is_list(Config) ->
- ?line Dog = test_server:timetrap(test_server:minutes(5)),
- ?line PrivDir = ?config(priv_dir, Config),
- ?line Outdir = filename:join(PrivDir, "core"),
- ?line ok = file:make_dir(Outdir),
+ PrivDir = ?config(priv_dir, Config),
+ Outdir = filename:join(PrivDir, "core"),
+ ok = file:make_dir(Outdir),
- ?line Wc = filename:join(filename:dirname(code:which(?MODULE)), "*.beam"),
- ?line TestBeams = filelib:wildcard(Wc),
- ?line Abstr = [begin {ok,{Mod,[{abstract_code,
+ Wc = filename:join(filename:dirname(code:which(?MODULE)), "*.beam"),
+ TestBeams = filelib:wildcard(Wc),
+ Abstr = [begin {ok,{Mod,[{abstract_code,
{raw_abstract_v1,Abstr}}]}} =
beam_lib:chunks(Beam, [abstract_code]),
{Mod,Abstr} end || Beam <- TestBeams],
- ?line Res = test_lib:p_run(fun(F) -> do_core(F, Outdir) end, Abstr),
- ?line test_server:timetrap_cancel(Dog),
- Res.
-
+ test_lib:p_run(fun(F) -> do_core(F, Outdir) end, Abstr).
do_core({M,A}, Outdir) ->
try
- {ok,M,Core} = compile:forms(A, [to_core,report]),
- CoreFile = filename:join(Outdir, atom_to_list(M)++".core"),
- CorePP = core_pp:format(Core),
- ok = file:write_file(CoreFile, CorePP),
- case compile:file(CoreFile, [clint,from_core,binary]) of
- {ok,M,_} ->
- ok = file:delete(CoreFile);
- Other ->
- io:format("*** core_lint failure '~p' for ~s\n",
- [Other,CoreFile]),
- error
- end
- catch Class:Error ->
+ do_core_1(M, A, Outdir)
+ catch
+ throw:{error,Error} ->
+ io:format("*** compilation failure '~p' for module ~s\n",
+ [Error,M]),
+ error;
+ Class:Error ->
io:format("~p: ~p ~p\n~p\n",
[M,Class,Error,erlang:get_stacktrace()]),
error
end.
+do_core_1(M, A, Outdir) ->
+ {ok,M,Core0} = compile:forms(A, [to_core]),
+ CoreFile = filename:join(Outdir, atom_to_list(M)++".core"),
+ CorePP = core_pp:format(Core0),
+ ok = file:write_file(CoreFile, CorePP),
+
+ %% Parse the .core file and return the result as Core Erlang Terms.
+ Core = case compile:file(CoreFile, [report_errors,from_core,no_copt,to_core,binary]) of
+ {ok,M,Core1} -> Core1;
+ Other -> throw({error,Other})
+ end,
+ ok = file:delete(CoreFile),
+
+ %% Compile as usual (including optimizations).
+ compile_forms(Core, [clint,from_core,binary]),
+
+ %% Don't optimize to test that we are not dependent
+ %% on the Core Erlang optimization passes.
+ %% (Example of a previous bug: The core_parse pass
+ %% would not turn map literals into #c_literal{}
+ %% records; if sys_core_fold was run it would fix
+ %% that; if sys_core_fold was not run v3_kernel would
+ %% crash.)
+ compile_forms(Core, [clint,from_core,no_copt,binary]),
+
+ ok.
+
+compile_forms(Forms, Opts) ->
+ case compile:forms(Forms, [report_errors|Opts]) of
+ {ok,[],_} -> ok;
+ Other -> throw({error,Other})
+ end.
+
%% Compile to Beam assembly language (.S) and then try to
%% run .S through the compiler again.
@@ -854,6 +877,20 @@ sys_pre_attributes(Config) ->
[report,verbose]),
ok.
+%% Test the dialyzer option to cover more code.
+dialyzer(Config) ->
+ Priv = ?config(priv_dir, Config),
+ file:set_cwd(?config(data_dir, Config)),
+ Opts = [{outdir,Priv},report_errors],
+ M = dialyzer_test,
+ {ok,M} = c:c(M, [dialyzer|Opts]),
+ [{a,b,c}] = M:M(),
+
+ %% Cover huge line numbers without the 'dialyzer' option.
+ {ok,M} = c:c(M, Opts),
+ [{a,b,c}] = M:M(),
+ ok.
+
%%%
%%% Utilities.
%%%
diff --git a/lib/compiler/test/compile_SUITE_data/dialyzer_test.erl b/lib/compiler/test/compile_SUITE_data/dialyzer_test.erl
new file mode 100644
index 0000000000..ed65ff9c43
--- /dev/null
+++ b/lib/compiler/test/compile_SUITE_data/dialyzer_test.erl
@@ -0,0 +1,39 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+-module(dialyzer_test).
+-export([?MODULE/0,turtle/0,test/1,huge/1]).
+
+-record(turtle, {a,b,c}).
+-record(tortoise, {a,b,c}).
+
+?MODULE() ->
+ [{a,b,c}].
+
+turtle() ->
+ #turtle{a=1,b=2,c=3}.
+
+test(T) ->
+ {T#tortoise.a,T#tortoise.b}.
+
+-file("dialyzer_test", 100000000).
+
+huge(X) ->
+ #turtle{a=42,b=100,c=511},
+ X#tortoise.a.
diff --git a/lib/compiler/test/core_SUITE_data/map_core_test.core b/lib/compiler/test/core_SUITE_data/map_core_test.core
index 2aa853d450..a75f6cf24f 100644
--- a/lib/compiler/test/core_SUITE_data/map_core_test.core
+++ b/lib/compiler/test/core_SUITE_data/map_core_test.core
@@ -7,11 +7,11 @@ module 'map_core_test' ['map_core_test'/0,
fun () ->
let <_cor0> =
%% Line 15
- ~{::<'check','ok'>,::<1337,#{#<104>(8,1,'integer',['unsigned'|['big']]),
+ ~{'check'=>'ok',1337=>#{#<104>(8,1,'integer',['unsigned'|['big']]),
#<101>(8,1,'integer',['unsigned'|['big']]),
#<108>(8,1,'integer',['unsigned'|['big']]),
#<108>(8,1,'integer',['unsigned'|['big']]),
- #<111>(8,1,'integer',['unsigned'|['big']])}#>,::<'val',0>}~
+ #<111>(8,1,'integer',['unsigned'|['big']])}#,'val'=>0}~
in let <M> =
%% Line 15
apply 'id'/1
@@ -23,7 +23,7 @@ module 'map_core_test' ['map_core_test'/0,
in %% Line 16
case apply 'call'/2
(M, _cor2) of
- <~{~<1337,#{#<104>(8,1,'integer',['unsigned'|['big']]),
+ <~{1337:=#{#<104>(8,1,'integer',['unsigned'|['big']]),
#<101>(8,1,'integer',['unsigned'|['big']]),
#<108>(8,1,'integer',['unsigned'|['big']]),
#<108>(8,1,'integer',['unsigned'|['big']]),
@@ -39,7 +39,7 @@ module 'map_core_test' ['map_core_test'/0,
#<32>(8,1,'integer',['unsigned'|['big']]),
#<53>(8,1,'integer',['unsigned'|['big']]),
#<32>(8,1,'integer',['unsigned'|['big']]),
- #<54>(8,1,'integer',['unsigned'|['big']])}#>,~<'check','ok'>,~<'val',21>}~> when 'true' ->
+ #<54>(8,1,'integer',['unsigned'|['big']])}#,'check':='ok','val':=21}~> when 'true' ->
%% Line 17
'ok'
( <_cor3> when 'true' ->
@@ -51,7 +51,7 @@ module 'map_core_test' ['map_core_test'/0,
%% Line 20
fun (_cor1,_cor0) ->
case <_cor1,_cor0> of
- <M = ~{~<1337,Bin>,~<'check',_cor8>,~<'val',Val>}~,[V|Vs]> when 'true' ->
+ <M = ~{1337:=Bin,'check':=_cor8,'val':=Val}~,[V|Vs]> when 'true' ->
let <_cor3> =
%% Line 21
call 'erlang':'+'
@@ -67,7 +67,7 @@ module 'map_core_test' ['map_core_test'/0,
(Val, V)
in let <_cor5> =
%% Line 21
- ~{~<1337,_cor4>,~<'val',_cor2>|M}~
+ ~{1337:=_cor4,'val':=_cor2|M}~
in %% Line 21
apply 'call'/2
(_cor5, Vs)
diff --git a/lib/compiler/test/core_fold_SUITE.erl b/lib/compiler/test/core_fold_SUITE.erl
index 6a7036d728..512aada203 100644
--- a/lib/compiler/test/core_fold_SUITE.erl
+++ b/lib/compiler/test/core_fold_SUITE.erl
@@ -23,7 +23,8 @@
t_element/1,setelement/1,t_length/1,append/1,t_apply/1,bifs/1,
eq/1,nested_call_in_case/1,guard_try_catch/1,coverage/1,
unused_multiple_values_error/1,unused_multiple_values/1,
- multiple_aliases/1,redundant_boolean_clauses/1,mixed_matching_clauses/1]).
+ multiple_aliases/1,redundant_boolean_clauses/1,
+ mixed_matching_clauses/1,unnecessary_building/1]).
-export([foo/0,foo/1,foo/2,foo/3]).
@@ -36,11 +37,12 @@ all() ->
[{group,p}].
groups() ->
- [{p,test_lib:parallel(),
+ [{p,[parallel],
[t_element,setelement,t_length,append,t_apply,bifs,
eq,nested_call_in_case,guard_try_catch,coverage,
unused_multiple_values_error,unused_multiple_values,
- multiple_aliases,redundant_boolean_clauses,mixed_matching_clauses]}].
+ multiple_aliases,redundant_boolean_clauses,
+ mixed_matching_clauses,unnecessary_building]}].
init_per_suite(Config) ->
@@ -60,6 +62,12 @@ t_element(Config) when is_list(Config) ->
X = make_ref(),
?line X = id(element(1, {X,y,z})),
?line b = id(element(2, {a,b,c,d})),
+ (fun() ->
+ case {a,#{k=>X}} of
+ {a,#{k:=X}}=Tuple ->
+ #{k:=X} = id(element(2, Tuple))
+ end
+ end)(),
%% No optimization, but should work.
Tuple = id({x,y,z}),
@@ -189,7 +197,10 @@ foo(A, B, C) ->
A + B + C.
bifs(Config) when is_list(Config) ->
- ?line <<1,2,3,4>> = id(list_to_binary([1,2,3,4])),
+ <<1,2,3,4>> = id(list_to_binary([1,2,3,4])),
+ K = {a,key},
+ V = {a,value},
+ {ok,#{K:=V}} = id(list_to_tuple([ok,#{K=>V}])),
ok.
-define(CMP_SAME(A0, B), (fun(A) -> true = A == B, false = A /= B end)(id(A0))).
@@ -204,6 +215,16 @@ eq(Config) when is_list(Config) ->
?line ?CMP_DIFF(a, [a]),
?line ?CMP_DIFF(a, {1,2,3}),
+ ?CMP_SAME(#{a=>1.0,b=>2}, #{b=>2.0,a=>1}),
+ ?CMP_SAME(#{a=>[1.0],b=>[2]}, #{b=>[2.0],a=>[1]}),
+
+ %% The rules for comparing keys are different in 17.x and 18.x.
+ %% Just test that the results are consistent.
+ Bool = id(#{1=>a}) == id(#{1.0=>a}), %Unoptimizable.
+ Bool = id(#{1=>a}) == #{1.0=>a}, %Optimizable.
+ Bool = #{1=>a} == #{1.0=>a}, %Optimizable.
+ io:format("Bool = ~p\n", [Bool]),
+
ok.
%% OTP-7117.
@@ -236,6 +257,8 @@ do_guard_try_catch(K, V) ->
false
end.
+-record(cover_opt_guard_try, {list=[]}).
+
coverage(Config) when is_list(Config) ->
?line {'EXIT',{{case_clause,{a,b,c}},_}} =
(catch cover_will_match_list_type({a,b,c})),
@@ -245,6 +268,9 @@ coverage(Config) when is_list(Config) ->
?line error = cover_will_match_lit_list(),
{ok,[a]} = cover_is_safe_bool_expr(a),
+ ok = cover_opt_guard_try(#cover_opt_guard_try{list=[a]}),
+ error = cover_opt_guard_try(#cover_opt_guard_try{list=[]}),
+
%% Make sure that we don't attempt to make literals
%% out of pids. (Putting a pid into a #c_literal{}
%% would crash later compiler passes.)
@@ -257,6 +283,12 @@ coverage(Config) when is_list(Config) ->
error = bsm_an_inlined(<<1,2,3>>, Config),
error = bsm_an_inlined([], Config),
+ %% Cover eval_rel_op/4.
+ Tuple = id({a,b}),
+ false = case Tuple of
+ {_,_} ->
+ Tuple =:= true
+ end,
ok.
cover_will_match_list_type(A) ->
@@ -298,6 +330,14 @@ cover_is_safe_bool_expr(X) ->
false
end.
+cover_opt_guard_try(Msg) ->
+ if
+ length(Msg#cover_opt_guard_try.list) =/= 1 ->
+ error;
+ true ->
+ ok
+ end.
+
bsm_an_inlined(<<_:8>>, _) -> ok;
bsm_an_inlined(_, _) -> error.
@@ -384,4 +424,29 @@ mixed_matching_clauses(Config) when is_list(Config) ->
end,
ok.
+unnecessary_building(Config) when is_list(Config) ->
+ Term1 = do_unnecessary_building_1(test_lib:id(a)),
+ [{a,a},{a,a}] = Term1,
+ 7 = erts_debug:size(Term1),
+
+ %% The Input term should not be rebuilt (thus, it should
+ %% only be counted once in the size of the combined term).
+ Input = test_lib:id({a,b,c}),
+ Term2 = test_lib:id(do_unnecessary_building_2(Input)),
+ {b,[{a,b,c},none],x} = Term2,
+ 4+4+4+2 = erts_debug:size([Term2|Input]),
+
+ ok.
+
+do_unnecessary_building_1(S) ->
+ %% The tuple must only be built once.
+ F0 = F1 = {S,S},
+ [F0,F1].
+
+do_unnecessary_building_2({a,_,_}=T) ->
+ %% The T term should not be rebuilt.
+ {b,
+ [_,_] = [T,none],
+ x}.
+
id(I) -> I.
diff --git a/lib/compiler/test/float_SUITE.erl b/lib/compiler/test/float_SUITE.erl
index afc04fd440..fb8da37f4f 100644
--- a/lib/compiler/test/float_SUITE.erl
+++ b/lib/compiler/test/float_SUITE.erl
@@ -118,6 +118,7 @@ math_functions(Config) when is_list(Config) ->
?line 0.0 = math:sinh(0),
?line 1.0 = math:cosh(0),
?line 0.0 = math:tanh(0),
+ 1.0 = math:log2(2),
?line 1.0 = math:log10(10),
?line -1.0 = math:cos(math:pi()),
?line 1.0 = math:exp(0),
@@ -136,6 +137,7 @@ math_functions(Config) when is_list(Config) ->
?line 0.0 = math:sinh(id(0)),
?line 1.0 = math:cosh(id(0)),
?line 0.0 = math:tanh(id(0)),
+ 1.0 = math:log2(id(2)),
?line 1.0 = math:log10(id(10)),
?line 1.0 = math:exp(id(0)),
?line 0.0 = math:log(id(1)),
diff --git a/lib/compiler/test/guard_SUITE.erl b/lib/compiler/test/guard_SUITE.erl
index eb205d09a7..08279d9408 100644
--- a/lib/compiler/test/guard_SUITE.erl
+++ b/lib/compiler/test/guard_SUITE.erl
@@ -30,7 +30,7 @@
old_guard_tests/1,
build_in_guard/1,gbif/1,
t_is_boolean/1,is_function_2/1,
- tricky/1,rel_ops/1,literal_type_tests/1,
+ tricky/1,rel_ops/1,rel_op_combinations/1,literal_type_tests/1,
basic_andalso_orelse/1,traverse_dcd/1,
check_qlc_hrl/1,andalso_semi/1,t_tuple_size/1,binary_part/1,
bad_constants/1,bad_guards/1]).
@@ -42,12 +42,13 @@ all() ->
[{group,p}].
groups() ->
- [{p,test_lib:parallel(),
+ [{p,[parallel],
[misc,const_cond,basic_not,complex_not,nested_nots,
semicolon,complex_semicolon,comma,or_guard,
more_or_guards,complex_or_guards,and_guard,xor_guard,
more_xor_guards,build_in_guard,old_guard_tests,gbif,
- t_is_boolean,is_function_2,tricky,rel_ops,
+ t_is_boolean,is_function_2,tricky,
+ rel_ops,rel_op_combinations,
literal_type_tests,basic_andalso_orelse,traverse_dcd,
check_qlc_hrl,andalso_semi,t_tuple_size,binary_part,
bad_constants,bad_guards]}].
@@ -330,7 +331,15 @@ complex_semicolon(Config) when is_list(Config) ->
?line ok = csemi6({a,b}, 0),
?line ok = csemi6({}, 3),
?line ok = csemi6({a,b,c}, 3),
-
+
+ %% 7
+ error = csemi7(#{a=>1}, 1, 0),
+ error = csemi7(<<>>, 1, 0),
+ ok = csemi7(#{a=>1}, 3, 0),
+ ok = csemi7(#{a=>1}, 0, 3),
+ ok = csemi7(#{a=>1}, 3, 3),
+ ok = csemi7(#{a=>1, b=>3}, 0, 0),
+
ok.
csemi1(Type, Val) when is_list(Val), Type == float;
@@ -442,6 +451,9 @@ csemi5(_, _) -> error.
csemi6(A, B) when hd([tuple_size(A)]) > 1; abs(B) > 2 -> ok;
csemi6(_, _) -> error.
+csemi7(A, B, C) when A#{a:=B} > #{a=>1}; abs(C) > 2 -> ok;
+csemi7(_, _, _) -> error.
+
comma(Config) when is_list(Config) ->
%% ',' combinations of literal true/false.
@@ -1122,6 +1134,231 @@ rel_ops(Config) when is_list(Config) ->
-undef(TestOp).
+rel_op_combinations(Config) when is_list(Config) ->
+ Digits0 = lists:seq(16#0030, 16#0039) ++
+ lists:seq(16#0660, 16#0669) ++
+ lists:seq(16#06F0, 16#06F9),
+ Digits = gb_sets:from_list(Digits0),
+ rel_op_combinations_1(16#0700, Digits),
+
+ BrokenRange0 = lists:seq(3, 5) ++
+ lists:seq(10, 12) ++ lists:seq(14, 20),
+ BrokenRange = gb_sets:from_list(BrokenRange0),
+ rel_op_combinations_2(30, BrokenRange),
+
+ Red0 = [{I,2*I} || I <- lists:seq(0, 50)] ++
+ [{I,5*I} || I <- lists:seq(51, 80)],
+ Red = gb_trees:from_orddict(Red0),
+ rel_op_combinations_3(100, Red).
+
+rel_op_combinations_1(0, _) ->
+ ok;
+rel_op_combinations_1(N, Digits) ->
+ Bool = gb_sets:is_member(N, Digits),
+ Bool = is_digit_1(N),
+ Bool = is_digit_2(N),
+ Bool = is_digit_3(N),
+ Bool = is_digit_4(N),
+ Bool = is_digit_5(N),
+ Bool = is_digit_6(N),
+ Bool = is_digit_7(N),
+ Bool = is_digit_8(N),
+ rel_op_combinations_1(N-1, Digits).
+
+is_digit_1(X) when 16#0660 =< X, X =< 16#0669 -> true;
+is_digit_1(X) when 16#0030 =< X, X =< 16#0039 -> true;
+is_digit_1(X) when 16#06F0 =< X, X =< 16#06F9 -> true;
+is_digit_1(_) -> false.
+
+is_digit_2(X) when (16#0030-1) < X, X =< 16#0039 -> true;
+is_digit_2(X) when (16#0660-1) < X, X =< 16#0669 -> true;
+is_digit_2(X) when (16#06F0-1) < X, X =< 16#06F9 -> true;
+is_digit_2(_) -> false.
+
+is_digit_3(X) when 16#0660 =< X, X < (16#0669+1) -> true;
+is_digit_3(X) when 16#0030 =< X, X < (16#0039+1) -> true;
+is_digit_3(X) when 16#06F0 =< X, X < (16#06F9+1) -> true;
+is_digit_3(_) -> false.
+
+is_digit_4(X) when (16#0660-1) < X, X < (16#0669+1) -> true;
+is_digit_4(X) when (16#0030-1) < X, X < (16#0039+1) -> true;
+is_digit_4(X) when (16#06F0-1) < X, X < (16#06F9+1) -> true;
+is_digit_4(_) -> false.
+
+is_digit_5(X) when X >= 16#0660, X =< 16#0669 -> true;
+is_digit_5(X) when X >= 16#0030, X =< 16#0039 -> true;
+is_digit_5(X) when X >= 16#06F0, X =< 16#06F9 -> true;
+is_digit_5(_) -> false.
+
+is_digit_6(X) when X > (16#0660-1), X =< 16#0669 -> true;
+is_digit_6(X) when X > (16#0030-1), X =< 16#0039 -> true;
+is_digit_6(X) when X > (16#06F0-1), X =< 16#06F9 -> true;
+is_digit_6(_) -> false.
+
+is_digit_7(X) when 16#0660 =< X, X =< 16#0669 -> true;
+is_digit_7(X) when 16#0030 =< X, X =< 16#003A, X =/= 16#003A -> true;
+is_digit_7(X) when 16#06F0 =< X, X =< 16#06F9 -> true;
+is_digit_7(_) -> false.
+
+is_digit_8(X) when X =< 16#0039, X > (16#0030-1) -> true;
+is_digit_8(X) when X =< 16#06F9, X > (16#06F0-1) -> true;
+is_digit_8(X) when X =< 16#0669, X > (16#0660-1) -> true;
+is_digit_8(16#0670) -> false;
+is_digit_8(_) -> false.
+
+rel_op_combinations_2(0, _) ->
+ ok;
+rel_op_combinations_2(N, Range) ->
+ Bool = gb_sets:is_member(N, Range),
+ Bool = broken_range_1(N),
+ Bool = broken_range_2(N),
+ Bool = broken_range_3(N),
+ Bool = broken_range_4(N),
+ Bool = broken_range_5(N),
+ Bool = broken_range_6(N),
+ Bool = broken_range_7(N),
+ Bool = broken_range_8(N),
+ Bool = broken_range_9(N),
+ Bool = broken_range_10(N),
+ Bool = broken_range_11(N),
+ Bool = broken_range_12(N),
+ Bool = broken_range_13(N),
+ rel_op_combinations_2(N-1, Range).
+
+broken_range_1(X) when X >= 10, X =< 20, X =/= 13 -> true;
+broken_range_1(X) when X >= 3, X =< 5 -> true;
+broken_range_1(_) -> false.
+
+broken_range_2(X) when X >= 10, X =< 12 -> true;
+broken_range_2(X) when X >= 14, X =< 20 -> true;
+broken_range_2(X) when X >= 3, X =< 5 -> true;
+broken_range_2(_) -> false.
+
+broken_range_3(X) when X >= 10, X =< 12 -> true;
+broken_range_3(X) when X >= 14, X < 21 -> true;
+broken_range_3(3) -> true;
+broken_range_3(4) -> true;
+broken_range_3(5) -> true;
+broken_range_3(_) -> false.
+
+broken_range_4(X) when X =< 5, X >= 3 -> true;
+broken_range_4(X) when X >= 10, X =< 20, X =/= 13 -> true;
+broken_range_4(X) when X =< 100 -> false;
+broken_range_4(_) -> false.
+
+broken_range_5(X) when X >= 10, X =< 20, X =/= 13 -> true;
+broken_range_5(X) when X > 2, X =< 5 -> true;
+broken_range_5(_) -> false.
+
+broken_range_6(X) when X >= 10, X =< 20, X =/= 13 -> true;
+broken_range_6(X) when X > 2, X < 6 -> true;
+broken_range_6(_) -> false.
+
+broken_range_7(X) when X > 2, X < 6 -> true;
+broken_range_7(X) when X >= 10, X =< 20, X =/= 13 -> true;
+broken_range_7(X) when X > 30 -> false;
+broken_range_7(_) -> false.
+
+broken_range_8(X) when X >= 10, X =< 20, X =/= 13 -> true;
+broken_range_8(X) when X =:= 3 -> true;
+broken_range_8(X) when X >= 3, X =< 5 -> true;
+broken_range_8(_) -> false.
+
+broken_range_9(X) when X >= 10, X =< 20, X =/= 13 -> true;
+broken_range_9(X) when X =:= 13 -> false;
+broken_range_9(X) when X >= 3, X =< 5 -> true;
+broken_range_9(_) -> false.
+
+broken_range_10(X) when X >= 3, X =< 5 -> true;
+broken_range_10(X) when X >= 10, X =< 20, X =/= 13 -> true;
+broken_range_10(X) when X =/= 13 -> false;
+broken_range_10(_) -> false.
+
+broken_range_11(X) when X >= 10, X =< 20, X =/= 13 -> true;
+broken_range_11(X) when is_tuple(X), X =:= 10 -> true;
+broken_range_11(X) when X >= 3, X =< 5 -> true;
+broken_range_11(_) -> false.
+
+broken_range_12(X) when X >= 3, X =< 5 -> true;
+broken_range_12(X) when X >= 10, X =< 20, X =/= 13 -> true;
+broken_range_12(X) when X < 30, X > 20 -> false;
+broken_range_12(_) -> false.
+
+broken_range_13(X) when X >= 10, X =< 20, 13 =/= X -> true;
+broken_range_13(X) when X >= 3, X =< 5 -> true;
+broken_range_13(_) -> false.
+
+rel_op_combinations_3(0, _) ->
+ ok;
+rel_op_combinations_3(N, Red) ->
+ Val = case gb_trees:lookup(N, Red) of
+ none -> none;
+ {value,V} -> V
+ end,
+ Val = redundant_1(N),
+ Val = redundant_2(N),
+ Val = redundant_3(N),
+ Val = redundant_4(N),
+ Val = redundant_5(N),
+ Val = redundant_6(N),
+ Val = redundant_7(N),
+ Val = redundant_8(N),
+ Val = redundant_9(N),
+ Val = redundant_10(N),
+ Val = redundant_11(N),
+ rel_op_combinations_3(N-1, Red).
+
+redundant_1(X) when X >= 51, X =< 80 -> 5*X;
+redundant_1(X) when X < 51 -> 2*X;
+redundant_1(_) -> none.
+
+redundant_2(X) when X < 51 -> 2*X;
+redundant_2(X) when X >= 51, X =< 80 -> 5*X;
+redundant_2(_) -> none.
+
+redundant_3(X) when X < 51 -> 2*X;
+redundant_3(X) when X =< 80, X >= 51 -> 5*X;
+redundant_3(X) when X =/= 100 -> none;
+redundant_3(_) -> none.
+
+redundant_4(X) when X < 51 -> 2*X;
+redundant_4(X) when X =< 80, X > 50 -> 5*X;
+redundant_4(X) when X =/= 100 -> none;
+redundant_4(_) -> none.
+
+redundant_5(X) when X < 51 -> 2*X;
+redundant_5(X) when X > 50, X < 81 -> 5*X;
+redundant_5(X) when X =< 10 -> none;
+redundant_5(_) -> none.
+
+redundant_6(X) when X > 50, X =< 80 -> 5*X;
+redundant_6(X) when X < 51 -> 2*X;
+redundant_6(_) -> none.
+
+redundant_7(X) when is_integer(X), X >= 51, X =< 80 -> 5*X;
+redundant_7(X) when is_integer(X), X < 51 -> 2*X;
+redundant_7(_) -> none.
+
+redundant_8(X) when X >= 51, X =< 80 -> 5*X;
+redundant_8(X) when X < 51 -> 2*X;
+redundant_8(_) -> none.
+
+redundant_9(X) when X >= 51, X =< 80 -> 5*X;
+redundant_9(X) when X < 51 -> 2*X;
+redundant_9(90) -> none;
+redundant_9(X) when X =/= 90 -> none;
+redundant_9(_) -> none.
+
+redundant_10(X) when X >= 51, X =< 80 -> 5*X;
+redundant_10(X) when X < 51 -> 2*X;
+redundant_10(90) -> none;
+redundant_10(X) when X =:= 90 -> none;
+redundant_10(_) -> none.
+
+redundant_11(X) when X < 51 -> 2*X;
+redundant_11(X) when X =:= 10 -> 2*X;
+redundant_11(X) when X >= 51, X =< 80 -> 5*X;
+redundant_11(_) -> none.
%% Test type tests on literal values. (From emulator test suites.)
literal_type_tests(Config) when is_list(Config) ->
@@ -1556,6 +1793,36 @@ bad_constants(Config) when is_list(Config) ->
bad_guards(Config) when is_list(Config) ->
if erlang:float(self()); true -> ok end,
+
+ fc(catch bad_guards_1(1, [])),
+ fc(catch bad_guards_1(1, [2])),
+ fc(catch bad_guards_1(atom, [2])),
+
+ fc(catch bad_guards_2(#{a=>0,b=>0}, [])),
+ fc(catch bad_guards_2(#{a=>0,b=>0}, [x])),
+ fc(catch bad_guards_2(not_a_map, [x])),
+ fc(catch bad_guards_2(42, [x])),
+
+ fc(catch bad_guards_3(#{a=>0,b=>0}, [])),
+ fc(catch bad_guards_3(#{a=>0,b=>0}, [x])),
+ fc(catch bad_guards_3(not_a_map, [x])),
+ fc(catch bad_guards_3(42, [x])),
+
+ ok.
+
+%% beam_bool used to produce GC BIF instructions whose
+%% Live operands included uninitialized registers.
+
+bad_guards_1(X, [_]) when {{X}}, -X ->
+ ok.
+
+bad_guards_2(M, [_]) when M#{a := 0, b => 0}, map_size(M) ->
+ ok.
+
+%% beam_type used to produce a GC BIF instruction whose Live operand
+%% included uninitialized registers.
+
+bad_guards_3(M, [_]) when is_map(M) andalso M#{a := 0, b => 0}, length(M) ->
ok.
%% Call this function to turn off constant propagation.
diff --git a/lib/compiler/test/lc_SUITE.erl b/lib/compiler/test/lc_SUITE.erl
index 398398a397..62bada1407 100644
--- a/lib/compiler/test/lc_SUITE.erl
+++ b/lib/compiler/test/lc_SUITE.erl
@@ -18,12 +18,12 @@
%%
-module(lc_SUITE).
--author('[email protected]').
-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
init_per_group/2,end_per_group/2,
init_per_testcase/2,end_per_testcase/2,
basic/1,deeply_nested/1,no_generator/1,
- empty_generator/1,no_export/1]).
+ empty_generator/1,no_export/1,shadow/1,
+ effect/1]).
-include_lib("test_server/include/test_server.hrl").
@@ -31,10 +31,18 @@ suite() -> [{ct_hooks,[ts_install_cth]}].
all() ->
test_lib:recompile(?MODULE),
- [basic, deeply_nested, no_generator, empty_generator, no_export].
+ [{group,p}].
groups() ->
- [].
+ [{p,test_lib:parallel(),
+ [basic,
+ deeply_nested,
+ no_generator,
+ empty_generator,
+ no_export,
+ shadow,
+ effect
+ ]}].
init_per_suite(Config) ->
Config.
@@ -59,34 +67,34 @@ end_per_testcase(Case, Config) when is_atom(Case), is_list(Config) ->
ok.
basic(Config) when is_list(Config) ->
- ?line L0 = lists:seq(1, 10),
- ?line L1 = my_map(fun(X) -> {x,X} end, L0),
- ?line L1 = [{x,X} || X <- L0],
- ?line L0 = my_map(fun({x,X}) -> X end, L1),
- ?line [1,2,3,4,5] = [X || X <- L0, X < 6],
- ?line [4,5,6] = [X || X <- L0, X > 3, X < 7],
- ?line [] = [X || X <- L0, X > 32, X < 7],
- ?line [1,3,5,7,9] = [X || X <- L0, odd(X)],
- ?line [2,4,6,8,10] = [X || X <- L0, not odd(X)],
- ?line [1,3,5,9] = [X || X <- L0, odd(X), X =/= 7],
- ?line [2,4,8,10] = [X || X <- L0, not odd(X), X =/= 6],
+ L0 = lists:seq(1, 10),
+ L1 = my_map(fun(X) -> {x,X} end, L0),
+ L1 = [{x,X} || X <- L0],
+ L0 = my_map(fun({x,X}) -> X end, L1),
+ [1,2,3,4,5] = [X || X <- L0, X < 6],
+ [4,5,6] = [X || X <- L0, X > 3, X < 7],
+ [] = [X || X <- L0, X > 32, X < 7],
+ [1,3,5,7,9] = [X || X <- L0, odd(X)],
+ [2,4,6,8,10] = [X || X <- L0, not odd(X)],
+ [1,3,5,9] = [X || X <- L0, odd(X), X =/= 7],
+ [2,4,8,10] = [X || X <- L0, not odd(X), X =/= 6],
%% Append is specially handled.
- ?line [1,3,5,9,2,4,8,10] = [X || X <- L0, odd(X), X =/= 7] ++
+ [1,3,5,9,2,4,8,10] = [X || X <- L0, odd(X), X =/= 7] ++
[X || X <- L0, not odd(X), X =/= 6],
%% Guard BIFs are evaluated in guard context. Weird, but true.
- ?line [{a,b,true},{x,y,true,true}] = [X || X <- tuple_list(), element(3, X)],
+ [{a,b,true},{x,y,true,true}] = [X || X <- tuple_list(), element(3, X)],
%% Filter expressions with andalso/orelse.
- ?line "abc123" = alphanum("?abc123.;"),
+ "abc123" = alphanum("?abc123.;"),
%% Error cases.
- ?line [] = [{xx,X} || X <- L0, element(2, X) == no_no_no],
- ?line {'EXIT',_} = (catch [X || X <- L1, list_to_atom(X) == dum]),
- ?line [] = [X || X <- L1, X+1 < 2],
- ?line {'EXIT',_} = (catch [X || X <- L1, odd(X)]),
- ?line fc([x], catch [E || E <- id(x)]),
+ [] = [{xx,X} || X <- L0, element(2, X) == no_no_no],
+ {'EXIT',_} = (catch [X || X <- L1, list_to_atom(X) == dum]),
+ [] = [X || X <- L1, X+1 < 2],
+ {'EXIT',_} = (catch [X || X <- L1, odd(X)]),
+ fc([x], catch [E || E <- id(x)]),
ok.
tuple_list() ->
@@ -116,12 +124,12 @@ deeply_nested_1() ->
X16 <- [4],X17 <- [3],X18 <- [fun() -> X16+X17 end],X19 <- [2],X20 <- [1]].
no_generator(Config) when is_list(Config) ->
- ?line Seq = lists:seq(-10, 17),
- ?line [no_gen_verify(no_gen(A, B), A, B) || A <- Seq, B <- Seq],
+ Seq = lists:seq(-10, 17),
+ [no_gen_verify(no_gen(A, B), A, B) || A <- Seq, B <- Seq],
%% Literal expression, for coverage.
- ?line [a] = [a || true],
- ?line [a,b,c] = [a || true] ++ [b,c],
+ [a] = [a || true],
+ [a,b,c] = [a || true] ++ [b,c],
ok.
no_gen(A, B) ->
@@ -174,13 +182,51 @@ no_gen_eval(Fun, Res) ->
no_gen_one_more(A, B) -> A + 1 =:= B.
empty_generator(Config) when is_list(Config) ->
- ?line [] = [X || {X} <- [], (false or (X/0 > 3))],
+ [] = [X || {X} <- [], (false or (X/0 > 3))],
ok.
no_export(Config) when is_list(Config) ->
[] = [ _X = a || false ] ++ [ _X = a || false ],
ok.
+%% Test that variables in list comprehensions are
+%% correctly shadowed.
+
+shadow(Config) when is_list(Config) ->
+ Shadowed = nomatch,
+ _ = id(Shadowed), %Eliminate warning.
+ L = [{Shadowed,Shadowed+1} || Shadowed <- lists:seq(7, 9)],
+ [{7,8},{8,9},{9,10}] = id(L),
+ [8,9] = id([Shadowed || {_,Shadowed} <- id(L),
+ Shadowed < 10]),
+ ok.
+
+effect(Config) when is_list(Config) ->
+ [{42,{a,b,c}}] =
+ do_effect(fun(F, L) ->
+ [F({V1,V2}) ||
+ #{<<1:500>>:=V1,<<2:301>>:=V2} <- L],
+ ok
+ end, id([#{},x,#{<<1:500>>=>42,<<2:301>>=>{a,b,c}}])),
+
+ %% Will trigger the time-trap timeout if not tail-recursive.
+ case ?MODULE of
+ lc_SUITE ->
+ _ = [{'EXIT',{badarg,_}} =
+ (catch binary_to_atom(<<C/utf8>>, utf8)) ||
+ C <- lists:seq(16#10000, 16#FFFFF)];
+ _ ->
+ ok
+ end,
+
+ ok.
+
+do_effect(Lc, L) ->
+ put(?MODULE, []),
+ F = fun(V) -> put(?MODULE, [V|get(?MODULE)]) end,
+ ok = Lc(F, L),
+ lists:reverse(erase(?MODULE)).
+
id(I) -> I.
fc(Args, {'EXIT',{function_clause,[{?MODULE,_,Args,_}|_]}}) -> ok;
diff --git a/lib/compiler/test/map_SUITE.erl b/lib/compiler/test/map_SUITE.erl
index 75efce9d7b..cfa8262701 100644
--- a/lib/compiler/test/map_SUITE.erl
+++ b/lib/compiler/test/map_SUITE.erl
@@ -61,7 +61,9 @@
suite() -> [].
-all() -> [
+all() ->
+ test_lib:recompile(?MODULE),
+ [
%% literals
t_build_and_match_literals,
t_update_literals, t_match_and_update_literals,
@@ -317,6 +319,12 @@ t_update_exact(Config) when is_list(Config) ->
{'EXIT',{badarg,_}} = (catch M0#{42=>v1,42.0:=v2,42:=v3}),
{'EXIT',{badarg,_}} = (catch <<>>#{nonexisting:=val}),
{'EXIT',{badarg,_}} = (catch M0#{<<0:257>> := val}), %% limitation
+
+ %% A workaround for a bug allowed an empty map to be updated.
+ {'EXIT',{badarg,_}} = (catch (id(#{}))#{a:=1}),
+ {'EXIT',{badarg,_}} = (catch #{}#{a:=1}),
+ Empty = #{},
+ {'EXIT',{badarg,_}} = (catch Empty#{a:=1}),
ok.
t_update_values(Config) when is_list(Config) ->
@@ -633,6 +641,7 @@ t_build_and_match_nil(Config) when is_list(Config) ->
"treat" => V2,
[] => V1 }),
#{ [] := V3, [] := V3 } = id(#{ [] => V1, [] => V3 }),
+ #{ <<1>> := V3, [] := V1 } = id(#{ [] => V1, <<1>> => V3 }),
ok.
t_build_and_match_structure(Config) when is_list(Config) ->
diff --git a/lib/compiler/test/match_SUITE.erl b/lib/compiler/test/match_SUITE.erl
index ae7d764535..7522ee985f 100644
--- a/lib/compiler/test/match_SUITE.erl
+++ b/lib/compiler/test/match_SUITE.erl
@@ -22,7 +22,8 @@
init_per_group/2,end_per_group/2,
pmatch/1,mixed/1,aliases/1,match_in_call/1,
untuplify/1,shortcut_boolean/1,letify_guard/1,
- selectify/1,underscore/1,coverage/1]).
+ selectify/1,underscore/1,match_map/1,map_vars_used/1,
+ coverage/1]).
-include_lib("test_server/include/test_server.hrl").
@@ -33,9 +34,10 @@ all() ->
[{group,p}].
groups() ->
- [{p,test_lib:parallel(),
+ [{p,[parallel],
[pmatch,mixed,aliases,match_in_call,untuplify,
- shortcut_boolean,letify_guard,selectify,underscore,coverage]}].
+ shortcut_boolean,letify_guard,selectify,
+ underscore,match_map,map_vars_used,coverage]}].
init_per_suite(Config) ->
@@ -400,6 +402,36 @@ underscore(Config) when is_list(Config) ->
_ = is_list(Config),
ok.
+-record(s, {map,t}).
+
+match_map(Config) when is_list(Config) ->
+ Map = #{key=>{x,y},ignore=>anything},
+ #s{map=Map,t={x,y}} = do_match_map(#s{map=Map}),
+ {a,#{k:={a,b,c}}} = do_match_map_2(#{k=>{a,b,c}}),
+ ok.
+
+do_match_map(#s{map=#{key:=Val}}=S) ->
+ %% Would crash with a 'badarg' exception.
+ S#s{t=Val}.
+
+do_match_map_2(Map) ->
+ case {a,Map} of
+ {a,#{k:=_}}=Tuple ->
+ Tuple
+ end.
+
+map_vars_used(Config) when is_list(Config) ->
+ {some,value} = do_map_vars_used(a, b, #{{a,b}=>42,v=>{some,value}}),
+ ok.
+
+do_map_vars_used(X, Y, Map) ->
+ case {X,Y} of
+ T ->
+ %% core_lib:is_var_used/2 would not consider T used.
+ #{T:=42,v:=Val} = Map,
+ Val
+ end.
+
coverage(Config) when is_list(Config) ->
%% Cover beam_dead.
ok = coverage_1(x, a),
diff --git a/lib/compiler/test/misc_SUITE.erl b/lib/compiler/test/misc_SUITE.erl
index 44c7161530..68a31f14d5 100644
--- a/lib/compiler/test/misc_SUITE.erl
+++ b/lib/compiler/test/misc_SUITE.erl
@@ -60,7 +60,7 @@ all() ->
[{group,p}].
groups() ->
- [{p,[],%%test_lib:parallel(),
+ [{p,[],
[tobias,empty_string,md5,silly_coverage,
confused_literals,integer_encoding,override_bif]}].
@@ -225,14 +225,15 @@ silly_coverage(Config) when is_list(Config) ->
{label,2}|non_proper_list]}],99},
?line expect_error(fun() -> beam_bool:module(BoolInput, []) end),
- %% beam_dead
+ %% beam_dead. This is tricky. Our function must look OK to
+ %% beam_utils:clean_labels/1, but must crash beam_dead.
DeadInput = {?MODULE,[{foo,0}],[],
[{function,foo,0,2,
[{label,1},
{func_info,{atom,?MODULE},{atom,foo},0},
{label,2},
- {jump,bad}]}],99},
- ?line expect_error(fun() -> beam_block:module(DeadInput, []) end),
+ {test,is_eq_exact,{f,1},[bad,operands]}]}],99},
+ expect_error(fun() -> beam_dead:module(DeadInput, []) end),
%% beam_clean
CleanInput = {?MODULE,[{foo,0}],[],
@@ -279,6 +280,14 @@ silly_coverage(Config) when is_list(Config) ->
{label,2}|non_proper_list]}],99},
expect_error(fun() -> beam_z:module(BeamZInput, []) end),
+ %% beam_validator.
+ BeamValInput = {?MODULE,[{foo,0}],[],
+ [{function,foo,0,2,
+ [{label,1},
+ {func_info,{atom,?MODULE},{atom,foo},0},
+ {label,2}|non_proper_list]}],99},
+ expect_error(fun() -> beam_validator:module(BeamValInput, []) end),
+
ok.
expect_error(Fun) ->
diff --git a/lib/compiler/test/receive_SUITE.erl b/lib/compiler/test/receive_SUITE.erl
index 00a6e900d4..fb82bf6101 100644
--- a/lib/compiler/test/receive_SUITE.erl
+++ b/lib/compiler/test/receive_SUITE.erl
@@ -187,12 +187,13 @@ ref_opt(Config) when is_list(Config) ->
end.
ref_opt_1(Config) ->
- ?line DataDir = ?config(data_dir, Config),
- ?line PrivDir = ?config(priv_dir, Config),
+ DataDir = ?config(data_dir, Config),
+ PrivDir = ?config(priv_dir, Config),
Sources = filelib:wildcard(filename:join([DataDir,"ref_opt","*.{erl,S}"])),
- ?line test_lib:p_run(fun(Src) ->
- do_ref_opt(Src, PrivDir)
- end, Sources),
+ test_lib:p_run(fun(Src) ->
+ do_ref_opt(Src, PrivDir)
+ end, Sources),
+ cover_recv_instructions(),
ok.
do_ref_opt(Source, PrivDir) ->
@@ -202,9 +203,9 @@ do_ref_opt(Source, PrivDir) ->
{outdir,PrivDir}] ++
[from_asm || Ext =:= ".S" ]),
Base = filename:rootname(filename:basename(Source), Ext),
- code:purge(list_to_atom(Base)),
- BeamFile = filename:join(PrivDir, Base),
- code:load_abs(BeamFile),
+ code:purge(list_to_atom(Base)),
+ BeamFile = filename:join(PrivDir, Base),
+ code:load_abs(BeamFile),
ok = Mod:Mod(),
{beam_file,Mod,_,_,_,Code} = beam_disasm:file(BeamFile),
case Base of
@@ -232,6 +233,27 @@ collect_recv_opt_instrs(Code) ->
end] || {function,_,_,_,Is} <- Code],
lists:append(L).
+cover_recv_instructions() ->
+ %% We want to cover the handling of recv_mark and recv_set in beam_utils.
+ %% Since those instructions are introduced in a late optimization pass,
+ %% beam_utils:live_opt() will not see them unless the compilation is
+ %% started from a .S file. The compile_SUITE:asm/1 test case will
+ %% compile all test suite files to .S and then run them through the
+ %% compiler again.
+ %%
+ %% Here we will ensure that this module contains recv_mark
+ %% and recv_set instructions.
+ Pid = spawn_link(fun() ->
+ receive {Parent,Ref} ->
+ Parent ! Ref
+ end
+ end),
+ Ref = make_ref(),
+ Pid ! {self(),Ref},
+ receive
+ Ref -> ok
+ end.
+
export(Config) when is_list(Config) ->
Ref = make_ref(),
?line self() ! {result,Ref,42},
diff --git a/lib/compiler/test/record_SUITE.erl b/lib/compiler/test/record_SUITE.erl
index f736e14bf6..8cc90026ec 100644
--- a/lib/compiler/test/record_SUITE.erl
+++ b/lib/compiler/test/record_SUITE.erl
@@ -246,6 +246,14 @@ record_test_2(Config) when is_list(Config) ->
?line Barf = update_barf(Barf0),
?line #barf{a="abc",b=1} = id(Barf),
+ %% Test optimization of is_record/3.
+ false = case id({a,b}) of
+ {_,_}=Tuple -> is_record(Tuple, foo)
+ end,
+ false = case id(true) of
+ true=Bool -> is_record(Bool, foo)
+ end,
+
ok.
record_test_3(Config) when is_list(Config) ->
diff --git a/lib/compiler/test/test_lib.erl b/lib/compiler/test/test_lib.erl
index a8befbecd9..a5e2855f8c 100644
--- a/lib/compiler/test/test_lib.erl
+++ b/lib/compiler/test/test_lib.erl
@@ -18,11 +18,13 @@
%%
-module(test_lib).
--include("test_server.hrl").
+-include_lib("test_server/include/test_server.hrl").
-compile({no_auto_import,[binary_part/2]}).
--export([recompile/1,parallel/0,uniq/0,opt_opts/1,get_data_dir/1,
+-export([id/1,recompile/1,parallel/0,uniq/0,opt_opts/1,get_data_dir/1,
smoke_disasm/1,p_run/2,binary_part/2]).
+id(I) -> I.
+
recompile(Mod) when is_atom(Mod) ->
case whereis(cover_server) of
undefined -> ok;
@@ -44,6 +46,10 @@ smoke_disasm(File) when is_list(File) ->
Res = beam_disasm:file(File),
{beam_file,_Mod} = {element(1, Res),element(2, Res)}.
+%% If we are running cover, we don't want to run test cases that
+%% invoke the compiler in parallel, as doing so would probably
+%% be slower than running them sequentially.
+
parallel() ->
case ?t:is_cover() orelse erlang:system_info(schedulers) =:= 1 of
true -> [];
@@ -90,13 +96,18 @@ get_data_dir(Config) ->
%% Will fail the test case if there were any errors.
p_run(Test, List) ->
+ S = erlang:system_info(schedulers),
N = case ?t:is_cover() of
false ->
- erlang:system_info(schedulers);
+ S + 1;
true ->
- %% Cover is running. Using more than one process
- %% will probably only slow down compilation.
- 1
+ %% Cover is running. Using too many processes
+ %% could slow us down. Measurements on my computer
+ %% showed that using 4 parallel processes was
+ %% slightly faster than using 3. Using more than
+ %% 4 would not buy us much and could actually be
+ %% slower.
+ max(S, 4)
end,
p_run_loop(Test, List, N, [], 0, 0).
diff --git a/lib/compiler/test/trycatch_SUITE.erl b/lib/compiler/test/trycatch_SUITE.erl
index 4530d08c77..80d93fbfa4 100644
--- a/lib/compiler/test/trycatch_SUITE.erl
+++ b/lib/compiler/test/trycatch_SUITE.erl
@@ -24,7 +24,8 @@
catch_oops/1,after_oops/1,eclectic/1,rethrow/1,
nested_of/1,nested_catch/1,nested_after/1,
nested_horrid/1,last_call_optimization/1,bool/1,
- plain_catch_coverage/1,andalso_orelse/1,get_in_try/1]).
+ plain_catch_coverage/1,andalso_orelse/1,get_in_try/1,
+ hockey/1]).
-include_lib("test_server/include/test_server.hrl").
@@ -35,11 +36,12 @@ all() ->
[{group,p}].
groups() ->
- [{p,test_lib:parallel(),
+ [{p,[parallel],
[basic,lean_throw,try_of,try_after,catch_oops,
after_oops,eclectic,rethrow,nested_of,nested_catch,
nested_after,nested_horrid,last_call_optimization,
- bool,plain_catch_coverage,andalso_orelse,get_in_try]}].
+ bool,plain_catch_coverage,andalso_orelse,get_in_try,
+ hockey]}].
init_per_suite(Config) ->
@@ -790,7 +792,6 @@ nested_after_1({X1,C1,V1},
nested_horrid(Config) when is_list(Config) ->
- _V = {make_ref(),nested_horrid,4.711},
{[true,true],{[true,1.0],1.0}} =
nested_horrid_1({true,void,void}, 1.0),
ok.
@@ -944,3 +945,14 @@ get_valid_line([_|T]=Path, Annotations) ->
_:not_found ->
get_valid_line(T, Annotations)
end.
+
+hockey(_) ->
+ {'EXIT',{{badmatch,_},[_|_]}} = (catch hockey()),
+ ok.
+
+hockey() ->
+ %% beam_jump used to generate a call into the try block.
+ %% beam_validator disapproved.
+ receive _ -> (b = fun() -> ok end)
+ + hockey, +x after 0 -> ok end, try (a = fun() -> ok end) + hockey, +
+ y catch _ -> ok end.
diff --git a/lib/compiler/test/warnings_SUITE.erl b/lib/compiler/test/warnings_SUITE.erl
index be0348a92d..dcd3910926 100644
--- a/lib/compiler/test/warnings_SUITE.erl
+++ b/lib/compiler/test/warnings_SUITE.erl
@@ -39,7 +39,7 @@
guard/1,bad_arith/1,bool_cases/1,bad_apply/1,
files/1,effect/1,bin_opt_info/1,bin_construction/1,
comprehensions/1,maps/1,redundant_boolean_clauses/1,
- latin1_fallback/1]).
+ latin1_fallback/1,underscore/1]).
% Default timetrap timeout (set in init_per_testcase).
-define(default_timeout, ?t:minutes(2)).
@@ -64,7 +64,8 @@ groups() ->
[pattern,pattern2,pattern3,pattern4,guard,
bad_arith,bool_cases,bad_apply,files,effect,
bin_opt_info,bin_construction,comprehensions,maps,
- redundant_boolean_clauses,latin1_fallback]}].
+ redundant_boolean_clauses,latin1_fallback,
+ underscore]}].
init_per_suite(Config) ->
Config.
@@ -280,11 +281,12 @@ bad_arith(Config) when is_list(Config) ->
{3,sys_core_fold,{eval_failure,badarith}},
{9,sys_core_fold,nomatch_guard},
{9,sys_core_fold,{eval_failure,badarith}},
+ {9,sys_core_fold,{no_effect,{erlang,is_integer,1}}},
{10,sys_core_fold,nomatch_guard},
{10,sys_core_fold,{eval_failure,badarith}},
{15,sys_core_fold,{eval_failure,badarith}}
] }}],
- ?line [] = run(Config, Ts),
+ [] = run(Config, Ts),
ok.
bool_cases(Config) when is_list(Config) ->
@@ -578,11 +580,11 @@ maps(Config) when is_list(Config) ->
<<"
t() ->
M = {a,[]},
- {'EXIT',{badarg,_}} = (catch(M#{ a => 1})),
+ {'EXIT',{badarg,_}} = (catch(M#{ a => 1 })),
ok.
">>,
[],
- {warnings,[{4,v3_kernel,bad_map}]}},
+ {warnings,[{4,sys_core_fold,{eval_failure,badarg}}]}},
{bad_map_src2,
<<"
t() ->
@@ -592,7 +594,7 @@ maps(Config) when is_list(Config) ->
id(I) -> I.
">>,
[inline],
- {warnings,[{4,v3_kernel,bad_map}]}},
+ []},
{bad_map_src3,
<<"
t() ->
@@ -678,6 +680,45 @@ latin1_fallback(Conf) when is_list(Conf) ->
ok.
+underscore(Config) when is_list(Config) ->
+ S0 = <<"f(A) ->
+ _VAR1 = <<A>>,
+ _VAR2 = {ok,A},
+ _VAR3 = [A],
+ ok.
+ g(A) ->
+ _VAR1 = A/0,
+ _VAR2 = date(),
+ ok.
+ h() ->
+ _VAR1 = fun() -> ok end,
+ ok.
+ i(A) ->
+ _VAR1 = #{A=>42},
+ ok.
+ ">>,
+ Ts0 = [{underscore0,
+ S0,
+ [],
+ {warnings,[{2,sys_core_fold,useless_building},
+ {3,sys_core_fold,useless_building},
+ {4,sys_core_fold,useless_building},
+ {7,sys_core_fold,result_ignored},
+ {8,sys_core_fold,{no_effect,{erlang,date,0}}},
+ {11,sys_core_fold,useless_building},
+ {14,sys_core_fold,useless_building}
+ ]}}],
+ [] = run(Config, Ts0),
+
+ %% Replace all "_VAR<digit>" variables with a plain underscore.
+ %% Now there should be no warnings.
+ S1 = re:replace(S0, "_VAR\\d+", "_", [global]),
+ io:format("~s\n", [S1]),
+ Ts1 = [{underscore1,S1,[],[]}],
+ [] = run(Config, Ts1),
+
+ ok.
+
%%%
%%% End of test cases.
%%%
@@ -699,10 +740,10 @@ run(Config, Tests) ->
%% Compiles a test module and returns the list of errors and warnings.
run_test(Conf, Test0, Warnings) ->
- Mod = "warnings_"++test_lib:uniq(),
- Filename = Mod ++ ".erl",
+ Module = "warnings_"++test_lib:uniq(),
+ Filename = Module ++ ".erl",
?line DataDir = ?privdir,
- Test = ["-module(", Mod, "). ", Test0],
+ Test = ["-module(", Module, "). ", Test0],
?line File = filename:join(DataDir, Filename),
?line Opts = [binary,export_all,return|Warnings],
?line ok = file:write_file(File, Test),
diff --git a/lib/crypto/c_src/crypto.c b/lib/crypto/c_src/crypto.c
index 750f3db7ef..aa99f2236e 100644
--- a/lib/crypto/c_src/crypto.c
+++ b/lib/crypto/c_src/crypto.c
@@ -242,6 +242,7 @@ static ERL_NIF_TERM aes_cfb_8_crypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM
static ERL_NIF_TERM aes_cfb_128_crypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
static ERL_NIF_TERM aes_ctr_encrypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
static ERL_NIF_TERM aes_ctr_stream_encrypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
+static ERL_NIF_TERM aes_ecb_crypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
static ERL_NIF_TERM rand_bytes_1(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
static ERL_NIF_TERM strong_rand_bytes_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
static ERL_NIF_TERM rand_bytes_3(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
@@ -379,6 +380,7 @@ static ErlNifFunc nif_funcs[] = {
{"aes_ctr_decrypt", 3, aes_ctr_encrypt},
{"aes_ctr_stream_encrypt", 2, aes_ctr_stream_encrypt},
{"aes_ctr_stream_decrypt", 2, aes_ctr_stream_encrypt},
+ {"aes_ecb_crypt", 3, aes_ecb_crypt},
{"rand_bytes", 1, rand_bytes_1},
{"strong_rand_bytes_nif", 1, strong_rand_bytes_nif},
{"rand_bytes", 3, rand_bytes_3},
@@ -410,7 +412,7 @@ static ErlNifFunc nif_funcs[] = {
{"bf_ecb_crypt", 3, bf_ecb_crypt},
{"blowfish_ofb64_encrypt", 3, blowfish_ofb64_encrypt},
- {"ec_key_generate", 1, ec_key_generate},
+ {"ec_key_generate", 2, ec_key_generate},
{"ecdsa_sign_nif", 4, ecdsa_sign_nif},
{"ecdsa_verify_nif", 5, ecdsa_verify_nif},
{"ecdh_compute_key_nif", 3, ecdh_compute_key_nif},
@@ -1686,14 +1688,15 @@ static ERL_NIF_TERM aes_cfb_8_crypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM
int new_ivlen = 0;
ERL_NIF_TERM ret;
- if (!enif_inspect_iolist_as_binary(env, argv[0], &key) || key.size != 16
+ if (!enif_inspect_iolist_as_binary(env, argv[0], &key)
+ || !(key.size == 16 || key.size == 24 || key.size == 32)
|| !enif_inspect_binary(env, argv[1], &ivec) || ivec.size != 16
|| !enif_inspect_iolist_as_binary(env, argv[2], &text)) {
return enif_make_badarg(env);
}
memcpy(ivec_clone, ivec.data, 16);
- AES_set_encrypt_key(key.data, 128, &aes_key);
+ AES_set_encrypt_key(key.data, key.size * 8, &aes_key);
AES_cfb8_encrypt((unsigned char *) text.data,
enif_make_new_binary(env, text.size, &ret),
text.size, &aes_key, ivec_clone, &new_ivlen,
@@ -1712,14 +1715,15 @@ static ERL_NIF_TERM aes_cfb_128_crypt(ErlNifEnv* env, int argc, const ERL_NIF_TE
CHECK_OSE_CRYPTO();
- if (!enif_inspect_iolist_as_binary(env, argv[0], &key) || key.size != 16
+ if (!enif_inspect_iolist_as_binary(env, argv[0], &key)
+ || !(key.size == 16 || key.size == 24 || key.size == 32)
|| !enif_inspect_binary(env, argv[1], &ivec) || ivec.size != 16
|| !enif_inspect_iolist_as_binary(env, argv[2], &text)) {
return enif_make_badarg(env);
}
memcpy(ivec_clone, ivec.data, 16);
- AES_set_encrypt_key(key.data, 128, &aes_key);
+ AES_set_encrypt_key(key.data, key.size * 8, &aes_key);
AES_cfb128_encrypt((unsigned char *) text.data,
enif_make_new_binary(env, text.size, &ret),
text.size, &aes_key, ivec_clone, &new_ivlen,
@@ -2032,6 +2036,38 @@ static ERL_NIF_TERM chacha20_poly1305_decrypt(ErlNifEnv* env, int argc, const ER
#endif
}
+static ERL_NIF_TERM aes_ecb_crypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
+{/* (Key, Data, IsEncrypt) */
+ ErlNifBinary key_bin, data_bin;
+ AES_KEY aes_key;
+ int i;
+ unsigned char* ret_ptr;
+ ERL_NIF_TERM ret;
+
+ CHECK_OSE_CRYPTO();
+
+ if (!enif_inspect_iolist_as_binary(env, argv[0], &key_bin)
+ || (key_bin.size != 16 && key_bin.size != 32)
+ || !enif_inspect_iolist_as_binary(env, argv[1], &data_bin)
+ || data_bin.size % 16 != 0) {
+ return enif_make_badarg(env);
+ }
+
+ if (argv[2] == atom_true) {
+ i = AES_ENCRYPT;
+ AES_set_encrypt_key(key_bin.data, key_bin.size*8, &aes_key);
+ }
+ else {
+ i = AES_DECRYPT;
+ AES_set_decrypt_key(key_bin.data, key_bin.size*8, &aes_key);
+ }
+
+ ret_ptr = enif_make_new_binary(env, data_bin.size, &ret);
+ AES_ecb_encrypt(data_bin.data, ret_ptr, &aes_key, i);
+ CONSUME_REDS(env,data_bin);
+ return ret;
+}
+
static ERL_NIF_TERM rand_bytes_1(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
{/* (Bytes) */
unsigned bytes;
@@ -3714,32 +3750,37 @@ out:
static ERL_NIF_TERM ec_key_generate(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
{
#if defined(HAVE_EC)
- EC_KEY *key = ec_key_new(env, argv[0]);
+ EC_KEY *key;
+ const EC_GROUP *group;
+ const EC_POINT *public_key;
+ ERL_NIF_TERM priv_key;
+ ERL_NIF_TERM pub_key = atom_undefined;
CHECK_OSE_CRYPTO();
- if (key && EC_KEY_generate_key(key)) {
- const EC_GROUP *group;
- const EC_POINT *public_key;
- ERL_NIF_TERM priv_key;
- ERL_NIF_TERM pub_key = atom_undefined;
-
- group = EC_KEY_get0_group(key);
- public_key = EC_KEY_get0_public_key(key);
+ if (!get_ec_key(env, argv[0], argv[1], atom_undefined, &key))
+ goto badarg;
- if (group && public_key) {
- pub_key = point2term(env, group, public_key,
- EC_KEY_get_conv_form(key));
- }
- priv_key = bn2term(env, EC_KEY_get0_private_key(key));
- EC_KEY_free(key);
- return enif_make_tuple2(env, pub_key, priv_key);
+ if (argv[1] == atom_undefined) {
+ if (!EC_KEY_generate_key(key))
+ goto badarg;
}
- else {
- if (key)
- EC_KEY_free(key);
- return enif_make_badarg(env);
+
+ group = EC_KEY_get0_group(key);
+ public_key = EC_KEY_get0_public_key(key);
+
+ if (group && public_key) {
+ pub_key = point2term(env, group, public_key,
+ EC_KEY_get_conv_form(key));
}
+ priv_key = bn2term(env, EC_KEY_get0_private_key(key));
+ EC_KEY_free(key);
+ return enif_make_tuple2(env, pub_key, priv_key);
+
+badarg:
+ if (key)
+ EC_KEY_free(key);
+ return enif_make_badarg(env);
#else
return atom_notsup;
#endif
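The C changes above widen the accepted AES key length in the CFB NIFs from 128 bits only to 128, 192 or 256 bits, and add an ECB NIF driven by an encrypt/decrypt flag. A minimal sketch of exercising the widened CFB support from Erlang, assuming the existing block_encrypt/4 and block_decrypt/4 API with the aes_cfb128 cipher listed in supports/0 (the key and IVec values here are illustrative only):

    %% 32-byte (256-bit) AES key, now accepted by the CFB NIFs.
    Key = crypto:strong_rand_bytes(32),
    %% CFB always uses a 16-byte initialization vector.
    IVec = crypto:strong_rand_bytes(16),
    Plain = <<"attack at dawn">>,
    Cipher = crypto:block_encrypt(aes_cfb128, Key, IVec, Plain),
    Plain = crypto:block_decrypt(aes_cfb128, Key, IVec, Cipher).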
diff --git a/lib/crypto/doc/src/crypto.xml b/lib/crypto/doc/src/crypto.xml
index 271130a9e6..4a8ba5c1bf 100644
--- a/lib/crypto/doc/src/crypto.xml
+++ b/lib/crypto/doc/src/crypto.xml
@@ -170,6 +170,36 @@
<funcs>
<func>
+ <name>block_encrypt(Type, Key, PlainText) -> CipherText</name>
+ <fsummary>Encrypt <c>PlainText</c> according to <c>Type</c> block cipher</fsummary>
+ <type>
+ <v>Type = des_ecb | blowfish_ecb | aes_ecb </v>
+ <v>Key = block_key() </v>
+ <v>PlainText = iodata() </v>
+ </type>
+ <desc>
+ <p>Encrypt <c>PlainText</c> according to <c>Type</c> block cipher.</p>
+ <p>May throw a <c>notsup</c> exception if the chosen <c>Type</c>
+ is not supported by the underlying OpenSSL implementation.</p>
+ </desc>
+ </func>
+
+ <func>
+ <name>block_decrypt(Type, Key, CipherText) -> PlainText</name>
+ <fsummary>Decrypt <c>CipherText</c> according to <c>Type</c> block cipher</fsummary>
+ <type>
+ <v>Type = des_ecb | blowfish_ecb | aes_ecb </v>
+ <v>Key = block_key() </v>
+ <v>CipherText = iodata() </v>
+ </type>
+ <desc>
+ <p>Decrypt <c>CipherText</c> according to <c>Type</c> block cipher.</p>
+ <p>May throw a <c>notsup</c> exception if the chosen <c>Type</c>
+ is not supported by the underlying OpenSSL implementation.</p>
+ </desc>
+ </func>
+
+ <func>
<name>block_encrypt(Type, Key, Ivec, PlainText) -> CipherText</name>
<name>block_encrypt(AeadType, Key, Ivec, {AAD, PlainText}) -> {CipherText, CipherTag}</name>
<fsummary>Encrypt <c>PlainText</c> according to <c>Type</c> block cipher</fsummary>
@@ -181,7 +211,7 @@
<v>AAD = IVec = CipherText = CipherTag = binary()</v>
</type>
<desc>
- <p>Encrypt <c>PlainText</c>according to <c>Type</c> block cipher.
+ <p>Encrypt <c>PlainText</c> according to <c>Type</c> block cipher.
<c>IVec</c> is an arbitrary initializing vector.</p>
<p>In AEAD (Authenticated Encryption with Associated Data) mode, encrypt
<c>PlainText</c>according to <c>Type</c> block cipher and calculate
@@ -203,7 +233,7 @@
<v>AAD = IVec = CipherText = CipherTag = binary()</v>
</type>
<desc>
- <p>Decrypt <c>CipherText</c>according to <c>Type</c> block cipher.
+ <p>Decrypt <c>CipherText</c> according to <c>Type</c> block cipher.
<c>IVec</c> is an arbitrary initializing vector.</p>
<p>In AEAD (Authenticated Encryption with Associated Data) mode, decrypt
<c>CipherText</c>according to <c>Type</c> block cipher and check the authenticity
@@ -269,7 +299,7 @@
<v>SrpUserParams = {user, [Generator::binary(), Prime::binary(), Version::atom()]}</v>
<v>SrpHostParams = {host, [Verifier::binary(), Generator::binary(), Prime::binary(), Version::atom()]}</v>
<v>PublicKey = dh_public() | ecdh_public() | srp_public() </v>
- <v>PrivKeyIn = undefined | dh_private() | srp_private() </v>
+ <v>PrivKeyIn = undefined | dh_private() | ecdh_private() | srp_private() </v>
<v>PrivKeyOut = dh_private() | ecdh_private() | srp_private() </v>
</type>
<desc>
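The new three-argument block_encrypt/block_decrypt documented above maps aes_ecb onto the NIF added in crypto.c. A round-trip sketch based on the documented types (key of 16 or 32 bytes, data a multiple of the 16-byte block size; the values are taken from the test vectors added below):

    Key = <<"YELLOW SUBMARINE">>,                       %% 16-byte key
    Plain = <<"YELLOW SUBMARINE">>,                     %% exactly one block
    Cipher = crypto:block_encrypt(aes_ecb, Key, Plain),
    Plain = crypto:block_decrypt(aes_ecb, Key, Cipher).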
diff --git a/lib/crypto/src/crypto.erl b/lib/crypto/src/crypto.erl
index 7f82fa83fd..e8845ed52f 100644
--- a/lib/crypto/src/crypto.erl
+++ b/lib/crypto/src/crypto.erl
@@ -211,7 +211,7 @@ supports()->
[{hashs, Hashs},
{ciphers, [des_cbc, des_cfb, des3_cbc, des_ede3, blowfish_cbc,
blowfish_cfb64, blowfish_ofb64, blowfish_ecb, aes_cbc128, aes_cfb8, aes_cfb128,
- aes_cbc256, rc2_cbc, aes_ctr, rc4] ++ Ciphers},
+ aes_cbc256, rc2_cbc, aes_ctr, rc4, aes_ecb] ++ Ciphers},
{public_keys, [rsa, dss, dh, srp] ++ PubKeys}
].
@@ -368,19 +368,24 @@ block_decrypt(chacha20_poly1305, Key, Ivec, {AAD, Data, Tag}) ->
end;
block_decrypt(rc2_cbc, Key, Ivec, Data) ->
rc2_cbc_decrypt(Key, Ivec, Data).
--spec block_encrypt(des_ecb | blowfish_ecb, Key::iodata(), Data::iodata()) -> binary().
+
+-spec block_encrypt(des_ecb | blowfish_ecb | aes_ecb, Key::iodata(), Data::iodata()) -> binary().
block_encrypt(des_ecb, Key, Data) ->
des_ecb_encrypt(Key, Data);
block_encrypt(blowfish_ecb, Key, Data) ->
- blowfish_ecb_encrypt(Key, Data).
+ blowfish_ecb_encrypt(Key, Data);
+block_encrypt(aes_ecb, Key, Data) ->
+ aes_ecb_encrypt(Key, Data).
--spec block_decrypt(des_ecb | blowfish_ecb, Key::iodata(), Data::iodata()) -> binary().
+-spec block_decrypt(des_ecb | blowfish_ecb | aes_ecb, Key::iodata(), Data::iodata()) -> binary().
block_decrypt(des_ecb, Key, Data) ->
des_ecb_decrypt(Key, Data);
block_decrypt(blowfish_ecb, Key, Data) ->
- blowfish_ecb_decrypt(Key, Data).
+ blowfish_ecb_decrypt(Key, Data);
+block_decrypt(aes_ecb, Key, Data) ->
+ aes_ecb_decrypt(Key, Data).
-spec next_iv(des_cbc | des3_cbc | aes_cbc | aes_ige, Data::iodata()) -> binary().
@@ -588,9 +593,8 @@ generate_key(srp, {user, [Generator, Prime, Version]}, PrivateArg)
end,
user_srp_gen_key(Private, Generator, Prime);
-generate_key(ecdh, Curve, undefined) ->
- ec_key_generate(nif_curve_params(Curve)).
-
+generate_key(ecdh, Curve, PrivKey) ->
+ ec_key_generate(nif_curve_params(Curve), ensure_int_as_bin(PrivKey)).
compute_key(dh, OthersPublicKey, MyPrivateKey, DHParameters) ->
case dh_compute_key_nif(ensure_int_as_bin(OthersPublicKey),
@@ -1393,6 +1397,18 @@ aes_ctr_encrypt(_Key, _IVec, _Data) -> ?nif_stub.
aes_ctr_decrypt(_Key, _IVec, _Cipher) -> ?nif_stub.
%%
+%% AES - in electronic codebook mode (ECB)
+%%
+aes_ecb_encrypt(Key, Data) ->
+ aes_ecb_crypt(Key, Data, true).
+
+aes_ecb_decrypt(Key, Data) ->
+ aes_ecb_crypt(Key, Data, false).
+
+aes_ecb_crypt(_Key, _Data, _IsEncrypt) -> ?nif_stub.
+
+
+%%
%% AES - in counter mode (CTR) with state maintained for multi-call streaming
%%
-type ctr_state() :: { iodata(), binary(), binary(), integer() }.
@@ -1555,7 +1571,7 @@ dh_compute_key(OthersPublicKey, MyPrivateKey, DHParameters) ->
dh_compute_key_nif(_OthersPublicKey, _MyPrivateKey, _DHParameters) -> ?nif_stub.
-ec_key_generate(_Key) -> ?nif_stub.
+ec_key_generate(_Curve, _Key) -> ?nif_stub.
ecdh_compute_key_nif(_Others, _Curve, _My) -> ?nif_stub.
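With ec_key_generate/2, the Erlang-level generate_key/3 can now take an explicit ECDH private key and return the matching public point, instead of always generating a fresh pair. A hedged sketch using the secp192r1 vectors added to crypto_SUITE below (an integer private key is accepted and converted with ensure_int_as_bin/1):

    %% Derive the public point for a known private scalar; for private key 1
    %% the point should equal the curve's base point from the NIST vectors.
    {PubPoint, _Priv} = crypto:generate_key(ecdh, secp192r1, 1),
    %% Passing 'undefined' still generates a fresh pair via EC_KEY_generate_key.
    {_NewPub, _NewPriv} = crypto:generate_key(ecdh, secp192r1, undefined).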
diff --git a/lib/crypto/test/crypto_SUITE.erl b/lib/crypto/test/crypto_SUITE.erl
index 1031e6403f..72944eea8e 100644
--- a/lib/crypto/test/crypto_SUITE.erl
+++ b/lib/crypto/test/crypto_SUITE.erl
@@ -58,6 +58,7 @@ all() ->
{group, aes_cfb8},
{group, aes_cfb128},
{group, aes_cbc256},
+ {group, aes_ecb},
{group, aes_ige256},
{group, rc2_cbc},
{group, rc4},
@@ -84,7 +85,7 @@ groups() ->
{dss, [], [sign_verify]},
{ecdsa, [], [sign_verify]},
{dh, [], [generate_compute]},
- {ecdh, [], [compute]},
+ {ecdh, [], [compute, generate]},
{srp, [], [generate_compute]},
{des_cbc, [], [block]},
{des_cfb, [], [block]},
@@ -96,6 +97,7 @@ groups() ->
{aes_cfb8,[], [block]},
{aes_cfb128,[], [block]},
{aes_cbc256,[], [block]},
+ {aes_ecb,[], [block]},
{aes_ige256,[], [block]},
{blowfish_cbc, [], [block]},
{blowfish_ecb, [], [block]},
@@ -243,6 +245,12 @@ compute(Config) when is_list(Config) ->
Gen = proplists:get_value(compute, Config),
lists:foreach(fun do_compute/1, Gen).
%%--------------------------------------------------------------------
+generate() ->
+ [{doc, " Test crypto:generate_key"}].
+generate(Config) when is_list(Config) ->
+ Gen = proplists:get_value(generate, Config),
+ lists:foreach(fun do_generate/1, Gen).
+%%--------------------------------------------------------------------
mod_pow() ->
[{doc, "mod_pow testing (A ^ M % P with bignums)"}].
mod_pow(Config) when is_list(Config) ->
@@ -494,6 +502,14 @@ do_compute({ecdh = Type, Pub, Priv, Curve, SharedSecret}) ->
ct:fail({{crypto, compute_key, [Type, Pub, Priv, Curve]}, {expected, SharedSecret}, {got, Other}})
end.
+do_generate({ecdh = Type, Curve, Priv, Pub}) ->
+ case crypto:generate_key(Type, Curve, Priv) of
+ {Pub, _} ->
+ ok;
+ {Other, _} ->
+ ct:fail({{crypto, generate_key, [Type, Priv, Curve]}, {expected, Pub}, {got, Other}})
+ end.
+
hexstr2point(X, Y) ->
<<4:8, (hexstr2bin(X))/binary, (hexstr2bin(Y))/binary>>.
@@ -721,7 +737,8 @@ group_config(srp, Config) ->
[{generate_compute, GenerateCompute} | Config];
group_config(ecdh, Config) ->
Compute = ecdh(),
- [{compute, Compute} | Config];
+ Generate = ecc(),
+ [{compute, Compute}, {generate, Generate} | Config];
group_config(dh, Config) ->
GenerateCompute = [dh()],
[{generate_compute, GenerateCompute} | Config];
@@ -749,6 +766,9 @@ group_config(aes_cbc128, Config) ->
group_config(aes_cbc256, Config) ->
Block = aes_cbc256(),
[{block, Block} | Config];
+group_config(aes_ecb, Config) ->
+ Block = aes_ecb(),
+ [{block, Block} | Config];
group_config(aes_ige256, Config) ->
Block = aes_ige256(),
[{block, Block} | Config];
@@ -1183,6 +1203,106 @@ aes_cbc256() ->
hexstr2bin("f69f2445df4f9b17ad2b417be66c3710")}
].
+aes_ecb() ->
+ [
+ {aes_ecb,
+ <<"YELLOW SUBMARINE">>,
+ <<"YELLOW SUBMARINE">>},
+ {aes_ecb,
+ <<"0000000000000000">>,
+ <<"0000000000000000">>},
+ {aes_ecb,
+ <<"FFFFFFFFFFFFFFFF">>,
+ <<"FFFFFFFFFFFFFFFF">>},
+ {aes_ecb,
+ <<"3000000000000000">>,
+ <<"1000000000000001">>},
+ {aes_ecb,
+ <<"1111111111111111">>,
+ <<"1111111111111111">>},
+ {aes_ecb,
+ <<"0123456789ABCDEF">>,
+ <<"1111111111111111">>},
+ {aes_ecb,
+ <<"0000000000000000">>,
+ <<"0000000000000000">>},
+ {aes_ecb,
+ <<"FEDCBA9876543210">>,
+ <<"0123456789ABCDEF">>},
+ {aes_ecb,
+ <<"7CA110454A1A6E57">>,
+ <<"01A1D6D039776742">>},
+ {aes_ecb,
+ <<"0131D9619DC1376E">>,
+ <<"5CD54CA83DEF57DA">>},
+ {aes_ecb,
+ <<"07A1133E4A0B2686">>,
+ <<"0248D43806F67172">>},
+ {aes_ecb,
+ <<"3849674C2602319E">>,
+ <<"51454B582DDF440A">>},
+ {aes_ecb,
+ <<"04B915BA43FEB5B6">>,
+ <<"42FD443059577FA2">>},
+ {aes_ecb,
+ <<"0113B970FD34F2CE">>,
+ <<"059B5E0851CF143A">>},
+ {aes_ecb,
+ <<"0170F175468FB5E6">>,
+ <<"0756D8E0774761D2">>},
+ {aes_ecb,
+ <<"43297FAD38E373FE">>,
+ <<"762514B829BF486A">>},
+ {aes_ecb,
+ <<"07A7137045DA2A16">>,
+ <<"3BDD119049372802">>},
+ {aes_ecb,
+ <<"04689104C2FD3B2F">>,
+ <<"26955F6835AF609A">>},
+ {aes_ecb,
+ <<"37D06BB516CB7546">>,
+ <<"164D5E404F275232">>},
+ {aes_ecb,
+ <<"1F08260D1AC2465E">>,
+ <<"6B056E18759F5CCA">>},
+ {aes_ecb,
+ <<"584023641ABA6176">>,
+ <<"004BD6EF09176062">>},
+ {aes_ecb,
+ <<"025816164629B007">>,
+ <<"480D39006EE762F2">>},
+ {aes_ecb,
+ <<"49793EBC79B3258F">>,
+ <<"437540C8698F3CFA">>},
+ {aes_ecb,
+ <<"018310DC409B26D6">>,
+ <<"1D9D5C5018F728C2">>},
+ {aes_ecb,
+ <<"1C587F1C13924FEF">>,
+ <<"305532286D6F295A">>},
+ {aes_ecb,
+ <<"0101010101010101">>,
+ <<"0123456789ABCDEF">>},
+ {aes_ecb,
+ <<"1F1F1F1F0E0E0E0E">>,
+ <<"0123456789ABCDEF">>},
+ {aes_ecb,
+ <<"E0FEE0FEF1FEF1FE">>,
+ <<"0123456789ABCDEF">>},
+ {aes_ecb,
+ <<"0000000000000000">>,
+ <<"FFFFFFFFFFFFFFFF">>},
+ {aes_ecb,
+ <<"FFFFFFFFFFFFFFFF">>,
+ <<"0000000000000000">>},
+ {aes_ecb,
+ <<"0123456789ABCDEF">>,
+ <<"0000000000000000">>},
+ {aes_ecb,
+ <<"FEDCBA9876543210">>,
+ <<"FFFFFFFFFFFFFFFF">>}
+ ].
+
aes_ige256() ->
[{aes_ige256,
hexstr2bin("603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4"),
@@ -1218,6 +1338,38 @@ aes_cfb8() ->
{aes_cfb8,
hexstr2bin("2b7e151628aed2a6abf7158809cf4f3c"),
hexstr2bin("26751F67A3CBB140B1808CF187A4F4DF"),
+ hexstr2bin("f69f2445df4f9b17ad2b417be66c3710")},
+ {aes_cfb8,
+ hexstr2bin("8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b"),
+ hexstr2bin("000102030405060708090a0b0c0d0e0f"),
+ hexstr2bin("6bc1bee22e409f96e93d7e117393172a")},
+ {aes_cfb8,
+ hexstr2bin("8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b"),
+ hexstr2bin("cdc80d6fddf18cab34c25909c99a4174"),
+ hexstr2bin("ae2d8a571e03ac9c9eb76fac45af8e51")},
+ {aes_cfb8,
+ hexstr2bin("8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b"),
+ hexstr2bin("67ce7f7f81173621961a2b70171d3d7a"),
+ hexstr2bin("30c81c46a35ce411e5fbc1191a0a52ef")},
+ {aes_cfb8,
+ hexstr2bin("8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b"),
+ hexstr2bin("2e1e8a1dd59b88b1c8e60fed1efac4c9"),
+ hexstr2bin("f69f2445df4f9b17ad2b417be66c3710")},
+ {aes_cfb8,
+ hexstr2bin("603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4"),
+ hexstr2bin("000102030405060708090a0b0c0d0e0f"),
+ hexstr2bin("6bc1bee22e409f96e93d7e117393172a")},
+ {aes_cfb8,
+ hexstr2bin("603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4"),
+ hexstr2bin("dc7e84bfda79164b7ecd8486985d3860"),
+ hexstr2bin("ae2d8a571e03ac9c9eb76fac45af8e51")},
+ {aes_cfb8,
+ hexstr2bin("603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4"),
+ hexstr2bin("39ffed143b28b1c832113c6331e5407b"),
+ hexstr2bin("30c81c46a35ce411e5fbc1191a0a52ef")},
+ {aes_cfb8,
+ hexstr2bin("603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4"),
+ hexstr2bin("df10132415e54b92a13ed0a8267ae2f9"),
hexstr2bin("f69f2445df4f9b17ad2b417be66c3710")}
].
@@ -1237,6 +1389,38 @@ aes_cfb128() ->
{aes_cfb128,
hexstr2bin("2b7e151628aed2a6abf7158809cf4f3c"),
hexstr2bin("26751F67A3CBB140B1808CF187A4F4DF"),
+ hexstr2bin("f69f2445df4f9b17ad2b417be66c3710")},
+ {aes_cfb128,
+ hexstr2bin("8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b"),
+ hexstr2bin("000102030405060708090a0b0c0d0e0f"),
+ hexstr2bin("6bc1bee22e409f96e93d7e117393172a")},
+ {aes_cfb128,
+ hexstr2bin("8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b"),
+ hexstr2bin("cdc80d6fddf18cab34c25909c99a4174"),
+ hexstr2bin("ae2d8a571e03ac9c9eb76fac45af8e51")},
+ {aes_cfb128,
+ hexstr2bin("8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b"),
+ hexstr2bin("67ce7f7f81173621961a2b70171d3d7a"),
+ hexstr2bin("30c81c46a35ce411e5fbc1191a0a52ef")},
+ {aes_cfb128,
+ hexstr2bin("8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b"),
+ hexstr2bin("2e1e8a1dd59b88b1c8e60fed1efac4c9"),
+ hexstr2bin("f69f2445df4f9b17ad2b417be66c3710")},
+ {aes_cfb128,
+ hexstr2bin("603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4"),
+ hexstr2bin("000102030405060708090a0b0c0d0e0f"),
+ hexstr2bin("6bc1bee22e409f96e93d7e117393172a")},
+ {aes_cfb128,
+ hexstr2bin("603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4"),
+ hexstr2bin("dc7e84bfda79164b7ecd8486985d3860"),
+ hexstr2bin("ae2d8a571e03ac9c9eb76fac45af8e51")},
+ {aes_cfb128,
+ hexstr2bin("603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4"),
+ hexstr2bin("39ffed143b28b1c832113c6331e5407b"),
+ hexstr2bin("30c81c46a35ce411e5fbc1191a0a52ef")},
+ {aes_cfb128,
+ hexstr2bin("603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4"),
+ hexstr2bin("df10132415e54b92a13ed0a8267ae2f9"),
hexstr2bin("f69f2445df4f9b17ad2b417be66c3710")}
].
@@ -1968,6 +2152,27 @@ rsa_oaep() ->
Msg = hexstr2bin("750c4047f547e8e41411856523298ac9bae245efaf1397fbe56f9dd5"),
{rsa, Public, Private, Msg, rsa_pkcs1_oaep_padding}.
+ecc() ->
+%% http://point-at-infinity.org/ecc/nisttv
+%%
+%% Test vectors for the NIST elliptic curves P192, P224, P256, P384, P521,
+%% B163, B233, B283, B409, B571, K163, K233, K283, K409 and K571. For more
+%% information about the curves see
+%% http://csrc.nist.gov/encryption/dss/ecdsa/NISTReCur.pdf
+%%
+ [{ecdh,secp192r1,1,
+ hexstr2point("188DA80EB03090F67CBF20EB43A18800F4FF0AFD82FF1012",
+ "07192B95FFC8DA78631011ED6B24CDD573F977A11E794811")},
+ {ecdh,secp192r1,2,
+ hexstr2point("DAFEBF5828783F2AD35534631588A3F629A70FB16982A888",
+ "DD6BDA0D993DA0FA46B27BBC141B868F59331AFA5C7E93AB")},
+ {ecdh,secp192r1,3,
+ hexstr2point("76E32A2557599E6EDCD283201FB2B9AADFD0D359CBB263DA",
+ "782C37E372BA4520AA62E0FED121D49EF3B543660CFD05FD")},
+ {ecdh,secp192r1,4,
+ hexstr2point("35433907297CC378B0015703374729D7A4FE46647084E4BA",
+ "A2649984F2135C301EA3ACB0776CD4F125389B311DB3BE32")}].
+
no_padding() ->
Public = [_, Mod] = rsa_public(),
Private = rsa_private(),
diff --git a/lib/debugger/src/dbg_wx_settings.erl b/lib/debugger/src/dbg_wx_settings.erl
index 20aac74c3d..2c332c0a54 100644
--- a/lib/debugger/src/dbg_wx_settings.erl
+++ b/lib/debugger/src/dbg_wx_settings.erl
@@ -65,14 +65,8 @@ open_win(Win, Pos, SFile, Str, What) ->
{style,What}]),
case wxFileDialog:showModal(FD) of
?wxID_OK ->
- case wxFileDialog:getPaths(FD) of
- [NewFile] ->
- wxFileDialog:destroy(FD),
- {ok, NewFile};
- _ ->
- wxFileDialog:destroy(FD),
- cancel
- end;
+ NewFile = wxFileDialog:getPath(FD),
+ {ok, NewFile};
_ ->
wxFileDialog:destroy(FD),
cancel
diff --git a/lib/dialyzer/doc/src/dialyzer.xml b/lib/dialyzer/doc/src/dialyzer.xml
index e482b1e6f8..b52c1edebf 100644
--- a/lib/dialyzer/doc/src/dialyzer.xml
+++ b/lib/dialyzer/doc/src/dialyzer.xml
@@ -139,7 +139,11 @@
<tag><c><![CDATA[-Wwarn]]></c></tag>
<item>A family of options which selectively turn on/off warnings
(for help on the names of warnings use
- <c><![CDATA[dialyzer -Whelp]]></c>).</item>
+ <c><![CDATA[dialyzer -Whelp]]></c>).
+ Note that the options can also be given in the source file with a
+ <c>-dialyzer()</c> attribute. See <seealso
+ marker="#suppression">Requesting or Suppressing Warnings in
+ Source Files</seealso> below for details.</item>
<tag><c><![CDATA[--shell]]></c></tag>
<item>Do not disable the Erlang shell while running the GUI.</item>
<tag><c><![CDATA[--version]]></c> (or <c><![CDATA[-v]]></c>)</tag>
@@ -269,6 +273,71 @@
given from the command line, so please refer to the sections above for
a description of these.</p>
</section>
+
+ <section>
+ <marker id="suppression"></marker>
+ <title>Requesting or Suppressing Warnings in Source Files</title>
+ <p>
+ The <c>-dialyzer()</c> attribute can be used for turning off
+ warnings in a module by specifying functions or warning options.
+ For example, to turn off all warnings for the function
+ <c>f/0</c>, include the following line:
+ </p>
+<code type="none">
+-dialyzer({nowarn_function, f/0}).
+</code>
+ <p>To turn off warnings for improper lists, add the following line
+ to the source file:
+ </p>
+<code type="none">
+-dialyzer(no_improper_lists).
+</code>
+ <p>The <c>-dialyzer()</c> attribute is also allowed after function
+ declarations. Lists of warning options or functions can be given:
+ </p>
+<code type="none">
+-dialyzer([{nowarn_function, [f/0]}, no_improper_lists]).
+</code>
+ <p>
+ Warning options can be restricted to functions:
+ </p>
+<code type="none">
+-dialyzer({no_improper_lists, g/0}).
+</code>
+<code type="none">
+-dialyzer({[no_return, no_match], [g/0, h/0]}).
+</code>
+ <p>
+ For help on the warning options use <c>dialyzer -Whelp</c>. The
+ options are also enumerated <seealso
+ marker="#gui/1">below</seealso> (<c>WarnOpts</c>).
+ </p>
+ <note>
+ <p>
+ The <c>-dialyzer()</c> attribute is not checked by the Erlang
+ Compiler, but by the Dialyzer itself.
+ </p>
+ </note>
+ <note>
+ <p>
+ The warning option <c>-Wrace_conditions</c> has no effect when
+ set in source files.
+ </p>
+ </note>
+ <p>
+ The <c>-dialyzer()</c> attribute can also be used for turning on
+ warnings. For instance, if all unmatched returns in a module
+ have been fixed, adding the line
+ </p>
+<code type="none">
+-dialyzer(unmatched_returns).
+</code>
+ <p>
+ can help ensure that no new unmatched return warnings are
+ introduced.
+ </p>
+ </section>
+
<funcs>
<func>
<name>gui() -> ok | {error, Msg}</name>
@@ -283,7 +352,7 @@
OptList :: [Option]
Option :: {files, [Filename :: string()]}
| {files_rec, [DirName :: string()]}
- | {defines, [{Macro: atom(), Value : term()}]}
+ | {defines, [{Macro :: atom(), Value :: term()}]}
| {from, src_code | byte_code} %% Defaults to byte_code
| {init_plt, FileName :: string()} %% If changed from default
| {plts, [FileName :: string()]} %% If changed from default
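To see how the documented attribute forms combine, here is a hypothetical module (module and function names invented for illustration) that suppresses all warnings for one function, suppresses improper-list warnings for another, and at the same time requests unmatched-return warnings for the whole module:

    -module(my_checked_module).
    -export([f/0, g/0]).

    %% Function-level suppression, an option restricted to one function,
    %% and a module-wide request to report unmatched returns.
    -dialyzer({nowarn_function, f/0}).
    -dialyzer({no_improper_lists, g/0}).
    -dialyzer(unmatched_returns).

    f() -> [a | b].   %% improper list construction; silenced by nowarn_function
    g() -> [a | b].   %% improper list construction; silenced by no_improper_lists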
diff --git a/lib/dialyzer/src/dialyzer.erl b/lib/dialyzer/src/dialyzer.erl
index cec94a49fd..c9e7da9ef0 100644
--- a/lib/dialyzer/src/dialyzer.erl
+++ b/lib/dialyzer/src/dialyzer.erl
@@ -282,15 +282,17 @@ cl_check_log(none) ->
cl_check_log(Output) ->
io:format(" Check output file `~s' for details\n", [Output]).
--spec format_warning(dial_warning()) -> string().
+-spec format_warning(raw_warning()) -> string().
format_warning(W) ->
format_warning(W, basename).
--spec format_warning(dial_warning(), fopt()) -> string().
+-spec format_warning(raw_warning() | dial_warning(), fopt()) -> string().
+format_warning({Tag, {File, Line, _MFA}, Msg}, FOpt) ->
+ format_warning({Tag, {File, Line}, Msg}, FOpt);
format_warning({_Tag, {File, Line}, Msg}, FOpt) when is_list(File),
- is_integer(Line) ->
+ is_integer(Line) ->
F = case FOpt of
fullpath -> File;
basename -> filename:basename(File)
diff --git a/lib/dialyzer/src/dialyzer.hrl b/lib/dialyzer/src/dialyzer.hrl
index 9a25f86512..90addc35a8 100644
--- a/lib/dialyzer/src/dialyzer.hrl
+++ b/lib/dialyzer/src/dialyzer.hrl
@@ -84,6 +84,15 @@
-type dial_warning() :: {dial_warn_tag(), file_line(), {atom(), [term()]}}.
%%
+%% This is the representation of each warning before suppressions have
+%% been applied
+%%
+-type m_or_mfa() :: module() % warnings not associated with any function
+ | mfa().
+-type warning_info() :: {file:filename(), non_neg_integer(), m_or_mfa()}.
+-type raw_warning() :: {dial_warn_tag(), warning_info(), {atom(), [term()]}}.
+
+%%
%% This is the representation of dialyzer's internal errors
%%
-type dial_error() :: any(). %% XXX: underspecified
@@ -103,6 +112,7 @@
-type fopt() :: 'basename' | 'fullpath'.
-type format() :: 'formatted' | 'raw'.
-type label() :: non_neg_integer().
+-type dial_warn_tags():: ordsets:ordset(dial_warn_tag()).
-type rep_mode() :: 'quiet' | 'normal' | 'verbose'.
-type start_from() :: 'byte_code' | 'src_code'.
-type mfa_or_funlbl() :: label() | mfa().
@@ -138,7 +148,7 @@
init_plts = [] :: [file:filename()],
include_dirs = [] :: [file:filename()],
output_plt = none :: 'none' | file:filename(),
- legal_warnings = ordsets:new() :: ordsets:ordset(dial_warn_tag()),
+ legal_warnings = ordsets:new() :: dial_warn_tags(),
report_mode = normal :: rep_mode(),
erlang_mode = false :: boolean(),
use_contracts = true :: boolean(),
diff --git a/lib/dialyzer/src/dialyzer_analysis_callgraph.erl b/lib/dialyzer/src/dialyzer_analysis_callgraph.erl
index af1c2b7e3a..5ff7ad9c6f 100644
--- a/lib/dialyzer/src/dialyzer_analysis_callgraph.erl
+++ b/lib/dialyzer/src/dialyzer_analysis_callgraph.erl
@@ -39,8 +39,6 @@
one_file_result/0,
compile_result/0]).
--export_type([no_warn_unused/0]).
-
-include("dialyzer.hrl").
-record(analysis_state,
@@ -50,8 +48,9 @@
defines = [] :: [dial_define()],
doc_plt :: dialyzer_plt:plt(),
include_dirs = [] :: [file:filename()],
- no_warn_unused :: no_warn_unused(),
parent :: pid(),
+ legal_warnings :: % command line options
+ [dial_warn_tag()],
plt :: dialyzer_plt:plt(),
start_from = byte_code :: start_from(),
use_contracts = true :: boolean(),
@@ -59,9 +58,10 @@
solvers :: [solver()]
}).
--record(server_state, {parent :: pid(), legal_warnings :: [dial_warn_tag()]}).
-
--type no_warn_unused() :: sets:set(mfa()).
+-record(server_state,
+ {
+ parent :: pid()
+ }).
%%--------------------------------------------------------------------
%% Main
@@ -75,24 +75,24 @@ start(Parent, LegalWarnings, Analysis) ->
Analysis0 =
Analysis#analysis{race_detection = RacesOn, timing_server = TimingServer},
Analysis1 = expand_files(Analysis0),
- Analysis2 = run_analysis(Analysis1),
- State = #server_state{parent = Parent, legal_warnings = LegalWarnings},
+ Analysis2 = run_analysis(Analysis1, LegalWarnings),
+ State = #server_state{parent = Parent},
loop(State, Analysis2, none),
dialyzer_timing:stop(TimingServer).
-run_analysis(Analysis) ->
+run_analysis(Analysis, LegalWarnings) ->
Self = self(),
- Fun = fun() -> analysis_start(Self, Analysis) end,
+ Fun = fun() -> analysis_start(Self, Analysis, LegalWarnings) end,
Analysis#analysis{analysis_pid = spawn_link(Fun)}.
-loop(#server_state{parent = Parent, legal_warnings = LegalWarnings} = State,
+loop(#server_state{parent = Parent} = State,
#analysis{analysis_pid = AnalPid} = Analysis, ExtCalls) ->
receive
{AnalPid, log, LogMsg} ->
send_log(Parent, LogMsg),
loop(State, Analysis, ExtCalls);
{AnalPid, warnings, Warnings} ->
- case filter_warnings(LegalWarnings, Warnings) of
+ case Warnings of
[] -> ok;
SendWarnings ->
send_warnings(Parent, SendWarnings)
@@ -129,7 +129,7 @@ loop(#server_state{parent = Parent, legal_warnings = LegalWarnings} = State,
%% The Analysis
%%--------------------------------------------------------------------
-analysis_start(Parent, Analysis) ->
+analysis_start(Parent, Analysis, LegalWarnings) ->
CServer = dialyzer_codeserver:new(),
Plt = Analysis#analysis.plt,
State = #analysis_state{codeserver = CServer,
@@ -139,13 +139,14 @@ analysis_start(Parent, Analysis) ->
include_dirs = Analysis#analysis.include_dirs,
plt = Plt,
parent = Parent,
+ legal_warnings = LegalWarnings,
start_from = Analysis#analysis.start_from,
use_contracts = Analysis#analysis.use_contracts,
timing_server = Analysis#analysis.timing_server,
solvers = Analysis#analysis.solvers
},
Files = ordsets:from_list(Analysis#analysis.files),
- {Callgraph, NoWarn, TmpCServer0} = compile_and_store(Files, State),
+ {Callgraph, TmpCServer0} = compile_and_store(Files, State),
%% Remote type postprocessing
NewCServer =
try
@@ -177,7 +178,6 @@ analysis_start(Parent, Analysis) ->
State0 = State#analysis_state{plt = NewPlt1},
dump_callgraph(Callgraph, State0, Analysis),
State1 = State0#analysis_state{codeserver = NewCServer},
- State2 = State1#analysis_state{no_warn_unused = NoWarn},
%% Remove all old versions of the files being analyzed
AllNodes = dialyzer_callgraph:all_nodes(Callgraph),
Plt1 = dialyzer_plt:delete_list(NewPlt1, AllNodes),
@@ -187,14 +187,14 @@ analysis_start(Parent, Analysis) ->
true -> dialyzer_callgraph:put_race_detection(true, Callgraph);
false -> Callgraph
end,
- State3 = analyze_callgraph(NewCallgraph, State2#analysis_state{plt = Plt1}),
+ State2 = analyze_callgraph(NewCallgraph, State1#analysis_state{plt = Plt1}),
dialyzer_callgraph:dispose_race_server(NewCallgraph),
rcv_and_send_ext_types(Parent),
NonExports = sets:subtract(sets:from_list(AllNodes), Exports),
NonExportsList = sets:to_list(NonExports),
- Plt2 = dialyzer_plt:delete_list(State3#analysis_state.plt, NonExportsList),
- send_codeserver_plt(Parent, CServer, State3#analysis_state.plt),
- send_analysis_done(Parent, Plt2, State3#analysis_state.doc_plt).
+ Plt2 = dialyzer_plt:delete_list(State2#analysis_state.plt, NonExportsList),
+ send_codeserver_plt(Parent, CServer, State2#analysis_state.plt),
+ send_analysis_done(Parent, Plt2, State2#analysis_state.doc_plt).
analyze_callgraph(Callgraph, #analysis_state{codeserver = Codeserver,
doc_plt = DocPlt,
@@ -210,11 +210,11 @@ analyze_callgraph(Callgraph, #analysis_state{codeserver = Codeserver,
TimingServer, Solvers, Parent),
{NewPlt0, DocPlt};
succ_typings ->
- NoWarn = State#analysis_state.no_warn_unused,
{Warnings, NewPlt0, NewDocPlt0} =
dialyzer_succ_typings:get_warnings(Callgraph, Plt, DocPlt, Codeserver,
- NoWarn, TimingServer, Solvers, Parent),
- send_warnings(State#analysis_state.parent, Warnings),
+ TimingServer, Solvers, Parent),
+ Warnings1 = filter_warnings(Warnings, Codeserver),
+ send_warnings(State#analysis_state.parent, Warnings1),
{NewPlt0, NewDocPlt0}
end,
dialyzer_callgraph:delete(Callgraph),
@@ -230,19 +230,22 @@ analyze_callgraph(Callgraph, #analysis_state{codeserver = Codeserver,
defines = [] :: [dial_define()],
include_dirs = [] :: [file:filename()],
start_from = byte_code :: start_from(),
- use_contracts = true :: boolean()
+ use_contracts = true :: boolean(),
+ legal_warnings :: [dial_warn_tag()]
}).
make_compile_init(#analysis_state{codeserver = Codeserver,
defines = Defs,
include_dirs = Dirs,
use_contracts = UseContracts,
+ legal_warnings = LegalWarnings,
start_from = StartFrom}, Callgraph) ->
#compile_init{callgraph = Callgraph,
codeserver = Codeserver,
defines = [{d, Macro, Val} || {Macro, Val} <- Defs],
include_dirs = [{i, D} || D <- Dirs],
use_contracts = UseContracts,
+ legal_warnings = LegalWarnings,
start_from = StartFrom}.
compile_and_store(Files, #analysis_state{codeserver = CServer,
@@ -252,7 +255,7 @@ compile_and_store(Files, #analysis_state{codeserver = CServer,
{T1, _} = statistics(wall_clock),
Callgraph = dialyzer_callgraph:new(),
CompileInit = make_compile_init(State, Callgraph),
- {{Failed, NoWarn, Modules}, NextLabel} =
+ {{Failed, Modules}, NextLabel} =
?timing(Timing, "compile", _C1,
dialyzer_coordinator:parallel_job(compile, Files,
CompileInit, Timing)),
@@ -281,34 +284,34 @@ compile_and_store(Files, #analysis_state{codeserver = CServer,
{T3, _} = statistics(wall_clock),
Msg2 = io_lib:format("done in ~.2f secs\n", [(T3-T2)/1000]),
send_log(Parent, Msg2),
- {Callgraph, sets:from_list(NoWarn), CServer2}.
+ {Callgraph, CServer2}.
-type compile_init_data() :: #compile_init{}.
-type error_reason() :: string().
--type compile_result() :: {[{file:filename(), error_reason()}], [mfa()],
+-type compile_result() :: {[{file:filename(), error_reason()}],
[module()]}. %%opaque
-type one_file_result() :: {error, error_reason()} |
{ok, [dialyzer_callgraph:callgraph_edge()],
- [mfa_or_funlbl()], [mfa()], module()}. %%opaque
--type compile_mid_data() :: {module(), cerl:cerl(), [mfa()],
+ [mfa_or_funlbl()], module()}. %%opaque
+-type compile_mid_data() :: {module(), cerl:cerl(),
dialyzer_callgraph:callgraph(),
dialyzer_codeserver:codeserver()}.
-spec compile_init_result() -> compile_result().
-compile_init_result() -> {[], [], []}.
+compile_init_result() -> {[], []}.
-spec add_to_result(file:filename(), one_file_result(), compile_result(),
compile_init_data()) -> compile_result().
-add_to_result(File, NewData, {Failed, NoWarn, Mods}, InitData) ->
+add_to_result(File, NewData, {Failed, Mods}, InitData) ->
case NewData of
{error, Reason} ->
- {[{File, Reason}|Failed], NoWarn, Mods};
- {ok, V, E, NewNoWarn, Mod} ->
+ {[{File, Reason}|Failed], Mods};
+ {ok, V, E, Mod} ->
Callgraph = InitData#compile_init.callgraph,
dialyzer_callgraph:add_edges(E, V, Callgraph),
- {Failed, NewNoWarn ++ NoWarn, [Mod|Mods]}
+ {Failed, [Mod|Mods]}
end.
-spec start_compilation(file:filename(), compile_init_data()) ->
@@ -318,12 +321,14 @@ start_compilation(File,
#compile_init{callgraph = Callgraph, codeserver = Codeserver,
defines = Defines, include_dirs = IncludeD,
use_contracts = UseContracts,
+ legal_warnings = LegalWarnings,
start_from = StartFrom}) ->
case StartFrom of
src_code ->
- compile_src(File, IncludeD, Defines, Callgraph, Codeserver, UseContracts);
+ compile_src(File, IncludeD, Defines, Callgraph, Codeserver,
+ UseContracts, LegalWarnings);
byte_code ->
- compile_byte(File, Callgraph, Codeserver, UseContracts)
+ compile_byte(File, Callgraph, Codeserver, UseContracts, LegalWarnings)
end.
cleanup_callgraph(#analysis_state{plt = InitPlt, parent = Parent,
@@ -357,88 +362,86 @@ cleanup_callgraph(#analysis_state{plt = InitPlt, parent = Parent,
end,
Callgraph1.
-compile_src(File, Includes, Defines, Callgraph, CServer, UseContracts) ->
+compile_src(File, Includes, Defines, Callgraph, CServer, UseContracts,
+ LegalWarnings) ->
DefaultIncludes = default_includes(filename:dirname(File)),
SrcCompOpts = dialyzer_utils:src_compiler_opts(),
CompOpts = SrcCompOpts ++ Includes ++ Defines ++ DefaultIncludes,
case dialyzer_utils:get_abstract_code_from_src(File, CompOpts) of
{error, _Msg} = Error -> Error;
{ok, AbstrCode} ->
- compile_common(File, AbstrCode, CompOpts, Callgraph, CServer, UseContracts)
+ compile_common(File, AbstrCode, CompOpts, Callgraph, CServer,
+ UseContracts, LegalWarnings)
end.
-compile_byte(File, Callgraph, CServer, UseContracts) ->
+compile_byte(File, Callgraph, CServer, UseContracts, LegalWarnings) ->
case dialyzer_utils:get_abstract_code_from_beam(File) of
error ->
{error, " Could not get abstract code for: " ++ File ++ "\n" ++
" Recompile with +debug_info or analyze starting from source code"};
{ok, AbstrCode} ->
- compile_byte(File, AbstrCode, Callgraph, CServer, UseContracts)
+ compile_byte(File, AbstrCode, Callgraph, CServer, UseContracts,
+ LegalWarnings)
end.
-compile_byte(File, AbstrCode, Callgraph, CServer, UseContracts) ->
+compile_byte(File, AbstrCode, Callgraph, CServer, UseContracts,
+ LegalWarnings) ->
case dialyzer_utils:get_compile_options_from_beam(File) of
error ->
{error, " Could not get compile options for: " ++ File ++ "\n" ++
" Recompile or analyze starting from source code"};
{ok, CompOpts} ->
- compile_common(File, AbstrCode, CompOpts, Callgraph, CServer, UseContracts)
+ compile_common(File, AbstrCode, CompOpts, Callgraph, CServer,
+ UseContracts, LegalWarnings)
end.
-compile_common(File, AbstrCode, CompOpts, Callgraph, CServer, UseContracts) ->
+compile_common(File, AbstrCode, CompOpts, Callgraph, CServer,
+ UseContracts, LegalWarnings) ->
case dialyzer_utils:get_core_from_abstract_code(AbstrCode, CompOpts) of
error -> {error, " Could not get core Erlang code for: " ++ File};
{ok, Core} ->
Mod = cerl:concrete(cerl:module_name(Core)),
- NoWarn = abs_get_nowarn(AbstrCode, Mod),
case dialyzer_utils:get_record_and_type_info(AbstrCode) of
{error, _} = Error -> Error;
{ok, RecInfo} ->
CServer1 =
dialyzer_codeserver:store_temp_records(Mod, RecInfo, CServer),
+ MetaFunInfo =
+ dialyzer_utils:get_fun_meta_info(Mod, AbstrCode, LegalWarnings),
+ CServer2 =
+ dialyzer_codeserver:insert_fun_meta_info(MetaFunInfo, CServer1),
case UseContracts of
true ->
case dialyzer_utils:get_spec_info(Mod, AbstrCode, RecInfo) of
{error, _} = Error -> Error;
{ok, SpecInfo, CallbackInfo} ->
- CServer2 =
+ CServer3 =
dialyzer_codeserver:store_temp_contracts(Mod, SpecInfo,
CallbackInfo,
- CServer1),
- store_core(Mod, Core, NoWarn, Callgraph, CServer2)
+ CServer2),
+ store_core(Mod, Core, Callgraph, CServer3)
end;
false ->
- store_core(Mod, Core, NoWarn, Callgraph, CServer1)
+ store_core(Mod, Core, Callgraph, CServer2)
end
end
end.
-store_core(Mod, Core, NoWarn, Callgraph, CServer) ->
+store_core(Mod, Core, Callgraph, CServer) ->
Exp = get_exports_from_core(Core),
ExpTypes = get_exported_types_from_core(Core),
CServer = dialyzer_codeserver:insert_exports(Exp, CServer),
CServer = dialyzer_codeserver:insert_temp_exported_types(ExpTypes, CServer),
CoreTree = cerl:from_records(Core),
- {ok, cerl_trees:size(CoreTree), {Mod, CoreTree, NoWarn, Callgraph, CServer}}.
+ CoreSize = cerl_trees:size(CoreTree),
+ {ok, CoreSize, {Mod, CoreTree, Callgraph, CServer}}.
-spec continue_compilation(integer(), compile_mid_data()) -> one_file_result().
-continue_compilation(NextLabel, {Mod, CoreTree, NoWarn, Callgraph, CServer}) ->
+continue_compilation(NextLabel, {Mod, CoreTree, Callgraph, CServer}) ->
{LabeledTree, _NewNextLabel} = cerl_trees:label(CoreTree, NextLabel),
LabeledCore = cerl:to_records(LabeledTree),
- store_code_and_build_callgraph(Mod, LabeledCore, Callgraph, NoWarn, CServer).
-
-abs_get_nowarn(Abs, M) ->
- Opts = lists:flatten([C || {attribute, _, compile, C} <- Abs]),
- Warn = erl_lint:bool_option(warn_unused_function, nowarn_unused_function,
- true, Opts),
- case Warn of
- false ->
- [{M, F, A} || {function, _, F, A, _} <- Abs]; % all functions
- true ->
- [{M, F, A} || {nowarn_unused_function, FAs} <- Opts,
- {F, A} <- lists:flatten([FAs])]
- end.
+ store_code_and_build_callgraph(Mod, LabeledCore, Callgraph, CServer).
get_exported_types_from_core(Core) ->
Attrs = cerl:module_attrs(Core),
@@ -456,11 +459,11 @@ get_exports_from_core(Core) ->
M = cerl:atom_val(cerl:module_name(Tree)),
[{M, F, A} || {F, A} <- Exports2].
-store_code_and_build_callgraph(Mod, Core, Callgraph, NoWarn, CServer) ->
+store_code_and_build_callgraph(Mod, Core, Callgraph, CServer) ->
CoreTree = cerl:from_records(Core),
{Vertices, Edges} = dialyzer_callgraph:scan_core_tree(CoreTree, Callgraph),
CServer = dialyzer_codeserver:insert(Mod, CoreTree, CServer),
- {ok, Vertices, Edges, NoWarn, Mod}.
+ {ok, Vertices, Edges, Mod}.
%%--------------------------------------------------------------------
%% Utilities
@@ -548,10 +551,19 @@ send_warnings(Parent, Warnings) ->
Parent ! {self(), warnings, Warnings},
ok.
-filter_warnings(LegalWarnings, Warnings) ->
- [TIW || {Tag, _Id, _Warning} = TIW <- Warnings,
- ordsets:is_element(Tag, LegalWarnings)].
+filter_warnings(Warnings, Codeserver) ->
+ [TWW || {Tag, WarningInfo, _Warning} = TWW <- Warnings,
+ is_ok_fun(WarningInfo, Codeserver),
+ is_ok_tag(Tag, WarningInfo, Codeserver)].
+
+is_ok_fun({_F, _L, Module}, _Codeserver) when is_atom(Module) ->
+ true;
+is_ok_fun({_Filename, _Line, {_M, _F, _A} = MFA}, Codeserver) ->
+ not dialyzer_utils:is_suppressed_fun(MFA, Codeserver).
+
+is_ok_tag(Tag, {_F, _L, MorMFA}, Codeserver) ->
+ not dialyzer_utils:is_suppressed_tag(MorMFA, Tag, Codeserver).
+
send_analysis_done(Parent, Plt, DocPlt) ->
Parent ! {self(), done, Plt, DocPlt},
ok.
@@ -573,7 +585,8 @@ send_codeserver_plt(Parent, CServer, Plt ) ->
ok.
send_bad_calls(Parent, BadCalls, CodeServer) ->
- send_warnings(Parent, format_bad_calls(BadCalls, CodeServer, [])).
+ FormattedBadCalls = format_bad_calls(BadCalls, CodeServer, []),
+ send_warnings(Parent, FormattedBadCalls).
send_mod_deps(Parent, ModuleDeps) ->
Parent ! {self(), mod_deps, ModuleDeps},
@@ -585,8 +598,9 @@ format_bad_calls([{{_, _, _}, {_, module_info, A}}|Left], CodeServer, Acc)
format_bad_calls([{FromMFA, {M, F, A} = To}|Left], CodeServer, Acc) ->
{_Var, FunCode} = dialyzer_codeserver:lookup_mfa_code(FromMFA, CodeServer),
Msg = {call_to_missing, [M, F, A]},
- FileLine = find_call_file_and_line(FunCode, To),
- NewAcc = [{?WARN_CALLGRAPH, FileLine, Msg}|Acc],
+ {File, Line} = find_call_file_and_line(FunCode, To),
+ WarningInfo = {File, Line, FromMFA},
+ NewAcc = [{?WARN_CALLGRAPH, WarningInfo, Msg}|Acc],
format_bad_calls(Left, CodeServer, NewAcc);
format_bad_calls([], _CodeServer, Acc) ->
Acc.
diff --git a/lib/dialyzer/src/dialyzer_behaviours.erl b/lib/dialyzer/src/dialyzer_behaviours.erl
index bbedd3201e..19b63bd2c8 100644
--- a/lib/dialyzer/src/dialyzer_behaviours.erl
+++ b/lib/dialyzer/src/dialyzer_behaviours.erl
@@ -52,7 +52,7 @@
-spec check_callbacks(module(), [{cerl:cerl(), cerl:cerl()}], rectab(),
dialyzer_plt:plt(),
- dialyzer_codeserver:codeserver()) -> [dial_warning()].
+ dialyzer_codeserver:codeserver()) -> [raw_warning()].
check_callbacks(Module, Attrs, Records, Plt, Codeserver) ->
{Behaviours, BehLines} = get_behaviours(Attrs),
@@ -65,7 +65,7 @@ check_callbacks(Module, Attrs, Records, Plt, Codeserver) ->
State = #state{plt = Plt, filename = File, behlines = BehLines,
codeserver = Codeserver, records = Records},
Warnings = get_warnings(Module, Behaviours, State),
- [add_tag_file_line(Module, W, State) || W <- Warnings]
+ [add_tag_warning_info(Module, W, State) || W <- Warnings]
end.
%%--------------------------------------------------------------------
@@ -193,7 +193,7 @@ find_mismatching_args(Kind, [Type|Rest], [CbType|CbRest], Behaviour,
Arity, Records, N+1, NewAcc)
end.
-add_tag_file_line(_Module, {Tag, [B|_R]} = Warn, State)
+add_tag_warning_info(Module, {Tag, [B|_R]} = Warn, State)
when Tag =:= callback_missing;
Tag =:= callback_info_missing ->
{B, Line} = lists:keyfind(B, 1, State#state.behlines),
@@ -202,18 +202,18 @@ add_tag_file_line(_Module, {Tag, [B|_R]} = Warn, State)
callback_missing -> ?WARN_BEHAVIOUR;
callback_info_missing -> ?WARN_UNDEFINED_CALLBACK
end,
- {Category, {State#state.filename, Line}, Warn};
-add_tag_file_line(_Module, {Tag, [File, Line|R]}, _State)
+ {Category, {State#state.filename, Line, Module}, Warn};
+add_tag_warning_info(Module, {Tag, [File, Line|R]}, _State)
when Tag =:= callback_spec_type_mismatch;
Tag =:= callback_spec_arg_type_mismatch ->
- {?WARN_BEHAVIOUR, {File, Line}, {Tag, R}};
-add_tag_file_line(Module, {_Tag, [_B, Fun, Arity|_R]} = Warn, State) ->
+ {?WARN_BEHAVIOUR, {File, Line, Module}, {Tag, R}};
+add_tag_warning_info(Module, {_Tag, [_B, Fun, Arity|_R]} = Warn, State) ->
{_A, FunCode} =
dialyzer_codeserver:lookup_mfa_code({Module, Fun, Arity},
State#state.codeserver),
Anns = cerl:get_ann(FunCode),
- FileLine = {get_file(Anns), get_line(Anns)},
- {?WARN_BEHAVIOUR, FileLine, Warn}.
+ WarningInfo = {get_file(Anns), get_line(Anns), {Module, Fun, Arity}},
+ {?WARN_BEHAVIOUR, WarningInfo, Warn}.
get_line([Line|_]) when is_integer(Line) -> Line;
get_line([_|Tail]) -> get_line(Tail);
diff --git a/lib/dialyzer/src/dialyzer_cl.erl b/lib/dialyzer/src/dialyzer_cl.erl
index 3e7d9dfa99..debb78bd0b 100644
--- a/lib/dialyzer/src/dialyzer_cl.erl
+++ b/lib/dialyzer/src/dialyzer_cl.erl
@@ -48,7 +48,7 @@
plt_info = none :: 'none' | dialyzer_plt:plt_info(),
report_mode = normal :: rep_mode(),
return_status= ?RET_NOTHING_SUSPICIOUS :: dial_ret(),
- stored_warnings = [] :: [dial_warning()],
+ stored_warnings = [] :: [raw_warning()],
unknown_behaviours = [] :: [dialyzer_behaviours:behaviour()]
}).
@@ -627,7 +627,7 @@ format_log_cache(LogCache) ->
Str = lists:append(lists:reverse(LogCache)),
string:join(string:tokens(Str, "\n"), "\n ").
--spec store_warnings(#cl_state{}, [dial_warning()]) -> #cl_state{}.
+-spec store_warnings(#cl_state{}, [raw_warning()]) -> #cl_state{}.
store_warnings(#cl_state{stored_warnings = StoredWarnings} = St, Warnings) ->
St#cl_state{stored_warnings = StoredWarnings ++ Warnings}.
@@ -685,16 +685,22 @@ return_value(State = #cl_state{erlang_mode = ErlangMode,
unknown_behaviours(State);
false -> []
end,
+ WarningInfo = {_Filename = "", _Line = 0, _MorMFA = ''},
UnknownWarnings =
- [{?WARN_UNKNOWN, {_Filename = "", _Line = 0}, W} || W <- Unknown],
+ [{?WARN_UNKNOWN, WarningInfo, W} || W <- Unknown],
AllWarnings =
UnknownWarnings ++ process_warnings(StoredWarnings),
- {RetValue, AllWarnings}
+ {RetValue, set_warning_id(AllWarnings)}
end.
unknown_functions(#cl_state{external_calls = Calls}) ->
[{unknown_function, MFA} || MFA <- Calls].
+set_warning_id(Warnings) ->
+ lists:map(fun({Tag, {File, Line, _MorMFA}, Msg}) ->
+ {Tag, {File, Line}, Msg}
+ end, Warnings).
+
print_ext_calls(#cl_state{report_mode = quiet}) ->
ok;
print_ext_calls(#cl_state{output = Output,
@@ -817,15 +823,16 @@ print_warnings(#cl_state{output = Output,
formatted ->
[dialyzer:format_warning(W, FOpt) || W <- PrWarnings];
raw ->
- [io_lib:format("~p. \n", [W]) || W <- PrWarnings]
+ [io_lib:format("~p. \n",
+ [W]) || W <- set_warning_id(PrWarnings)]
end,
io:format(Output, "\n~s", [S])
end.
--spec process_warnings([dial_warning()]) -> [dial_warning()].
+-spec process_warnings([raw_warning()]) -> [raw_warning()].
process_warnings(Warnings) ->
- Warnings1 = lists:keysort(2, Warnings), %% Sort on file/line
+ Warnings1 = lists:keysort(2, Warnings), %% Sort on file/line (and m/mfa..)
remove_duplicate_warnings(Warnings1, []).
remove_duplicate_warnings([Duplicate, Duplicate|Left], Acc) ->
diff --git a/lib/dialyzer/src/dialyzer_codeserver.erl b/lib/dialyzer/src/dialyzer_codeserver.erl
index 593e71f30b..e0add00061 100644
--- a/lib/dialyzer/src/dialyzer_codeserver.erl
+++ b/lib/dialyzer/src/dialyzer_codeserver.erl
@@ -43,19 +43,21 @@
insert/3,
insert_exports/2,
insert_temp_exported_types/2,
+ insert_fun_meta_info/2,
is_exported/2,
lookup_mod_code/2,
lookup_mfa_code/2,
lookup_mod_records/2,
lookup_mod_contracts/2,
lookup_mfa_contract/2,
+ lookup_meta_info/2,
new/0,
set_next_core_label/2,
set_temp_records/2,
store_temp_records/3,
store_temp_contracts/4]).
--export_type([codeserver/0]).
+-export_type([codeserver/0, fun_meta_info/0]).
-include("dialyzer.hrl").
@@ -70,12 +72,19 @@
-type contracts() :: dict:dict(mfa(),dialyzer_contracts:file_contract()).
-type mod_contracts() :: dict:dict(module(), contracts()).
+%% A property-list of data compiled from -compile and -dialyzer attributes.
+-type meta_info() :: [{{'nowarn_function' | dial_warn_tag()},
+ 'mod' | 'func'}].
+-type fun_meta_info() :: [{mfa(), meta_info()}
+ | {module(), [dial_warn_tag()]}].
+
-record(codeserver, {next_core_label = 0 :: label(),
code :: dict_ets(),
exported_types :: set_ets(), % set(mfa())
records :: dict_ets(),
contracts :: dict_ets(),
callbacks :: dict_ets(),
+ fun_meta_info :: dict_ets(), % {mfa(), meta_info()}
exports :: 'clean' | set_ets(), % set(mfa())
temp_exported_types :: 'clean' | set_ets(), % set(mfa())
temp_records :: 'clean' | dict_ets(),
@@ -129,14 +138,17 @@ new() ->
CodeOptions = [compressed, public, {read_concurrency, true}],
Code = ets:new(dialyzer_codeserver_code, CodeOptions),
TempOptions = [public, {write_concurrency, true}],
- [Exports, TempExportedTypes, TempRecords, TempContracts, TempCallbacks] =
+ [Exports, FunMetaInfo, TempExportedTypes, TempRecords, TempContracts,
+ TempCallbacks] =
[ets:new(Name, TempOptions) ||
Name <-
- [dialyzer_codeserver_exports, dialyzer_codeserver_temp_exported_types,
+ [dialyzer_codeserver_exports, dialyzer_codeserver_fun_meta_info,
+ dialyzer_codeserver_temp_exported_types,
dialyzer_codeserver_temp_records, dialyzer_codeserver_temp_contracts,
dialyzer_codeserver_temp_callbacks]],
#codeserver{code = Code,
exports = Exports,
+ fun_meta_info = FunMetaInfo,
temp_exported_types = TempExportedTypes,
temp_records = TempRecords,
temp_contracts = TempContracts,
@@ -184,6 +196,12 @@ insert_exports(List, #codeserver{exports = Exports} = CS) ->
true = ets_set_insert_list(List, Exports),
CS.
+-spec insert_fun_meta_info(fun_meta_info(), codeserver()) -> codeserver().
+
+insert_fun_meta_info(List, #codeserver{fun_meta_info = FunMetaInfo} = CS) ->
+ true = ets:insert(FunMetaInfo, List),
+ CS.
+
-spec is_exported(mfa(), codeserver()) -> boolean().
is_exported(MFA, #codeserver{exports = Exports}) ->
@@ -290,6 +308,14 @@ get_file_contract(Key, ContDict) ->
lookup_mfa_contract(MFA, #codeserver{contracts = ContDict}) ->
ets_dict_find(MFA, ContDict).
+-spec lookup_meta_info(module() | mfa(), codeserver()) -> meta_info().
+
+lookup_meta_info(MorMFA, #codeserver{fun_meta_info = FunMetaInfo}) ->
+ case ets_dict_find(MorMFA, FunMetaInfo) of
+ error -> [];
+ {ok, PropList} -> PropList
+ end.
+
-spec get_contracts(codeserver()) -> mod_contracts().
get_contracts(#codeserver{contracts = ContDict}) ->
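
An illustrative example (module, functions and the WarnTag parameter are made up) of the entries that insert_fun_meta_info/2 stores and lookup_meta_info/2 retrieves; the shapes follow what get_fun_meta_info/3 in dialyzer_utils, later in this patch, produces. lookup_meta_info/2 returns [] for a key without an entry.

%% Illustrative sketch, not part of the patch.
-module(meta_info_shape).
-export([example/1]).

%% WarnTag stands for any dial_warn_tag() value from dialyzer.hrl.
example(WarnTag) ->
    [{{mymod, f, 1}, [{nowarn_function, func}]}, % per-MFA meta_info()
     {{mymod, g, 2}, [{WarnTag, mod}]},          % per-MFA meta_info()
     {mymod, [WarnTag]}].                        % module-level warning tags
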
diff --git a/lib/dialyzer/src/dialyzer_contracts.erl b/lib/dialyzer/src/dialyzer_contracts.erl
index ee147ca102..4a1ba9c539 100644
--- a/lib/dialyzer/src/dialyzer_contracts.erl
+++ b/lib/dialyzer/src/dialyzer_contracts.erl
@@ -2,7 +2,7 @@
%%-----------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -351,7 +351,7 @@ solve_constraints(Contract, Call, Constraints) ->
%% Checks the contracts for functions that are not implemented
-spec contracts_without_fun(contracts(), [_], dialyzer_callgraph:callgraph()) ->
- [dial_warning()].
+ [raw_warning()].
contracts_without_fun(Contracts, AllFuns0, Callgraph) ->
AllFuns1 = [{dialyzer_callgraph:lookup_name(Label, Callgraph), Arity}
@@ -362,8 +362,9 @@ contracts_without_fun(Contracts, AllFuns0, Callgraph) ->
[warn_spec_missing_fun(MFA, Contracts) || MFA <- ErrorContractMFAs].
warn_spec_missing_fun({M, F, A} = MFA, Contracts) ->
- {FileLine, _Contract, _Xtra} = dict:fetch(MFA, Contracts),
- {?WARN_CONTRACT_SYNTAX, FileLine, {spec_missing_fun, [M, F, A]}}.
+ {{File, Line}, _Contract, _Xtra} = dict:fetch(MFA, Contracts),
+ WarningInfo = {File, Line, MFA},
+ {?WARN_CONTRACT_SYNTAX, WarningInfo, {spec_missing_fun, [M, F, A]}}.
%% This treats the "when" constraints. It will be extended, we hope.
insert_constraints([{subtype, Type1, Type2}|Left], Dict) ->
@@ -393,21 +394,22 @@ insert_constraints([], Dict) -> Dict.
store_tmp_contract(MFA, FileLine, {TypeSpec, Xtra}, SpecDict, RecordsDict) ->
%% io:format("contract from form: ~p\n", [TypeSpec]),
- TmpContract = contract_from_form(TypeSpec, RecordsDict, FileLine),
+ {Module, _, _} = MFA,
+ TmpContract = contract_from_form(TypeSpec, Module, RecordsDict, FileLine),
%% io:format("contract: ~p\n", [TmpContract]),
dict:store(MFA, {FileLine, TmpContract, Xtra}, SpecDict).
-contract_from_form(Forms, RecDict, FileLine) ->
- {CFuns, Forms1} = contract_from_form(Forms, RecDict, FileLine, [], []),
+contract_from_form(Forms, Module, RecDict, FileLine) ->
+ {CFuns, Forms1} = contract_from_form(Forms, Module, RecDict, FileLine, [], []),
#tmp_contract{contract_funs = CFuns, forms = Forms1}.
-contract_from_form([{type, _, 'fun', [_, _]} = Form | Left], RecDict,
+contract_from_form([{type, _, 'fun', [_, _]} = Form | Left], Module, RecDict,
FileLine, TypeAcc, FormAcc) ->
TypeFun =
fun(ExpTypes, AllRecords) ->
- Type =
+ NewType =
try
- erl_types:t_from_form(Form, RecDict)
+ erl_types:t_from_form(Form, ExpTypes, Module, AllRecords)
catch
throw:{error, Msg} ->
{File, Line} = FileLine,
@@ -415,61 +417,60 @@ contract_from_form([{type, _, 'fun', [_, _]} = Form | Left], RecDict,
Line, Msg]),
throw({error, NewMsg})
end,
- NewType = erl_types:t_solve_remote(Type, ExpTypes, AllRecords),
NewTypeNoVars = erl_types:subst_all_vars_to_any(NewType),
{NewTypeNoVars, []}
end,
NewTypeAcc = [TypeFun | TypeAcc],
NewFormAcc = [{Form, []} | FormAcc],
- contract_from_form(Left, RecDict, FileLine, NewTypeAcc, NewFormAcc);
+ contract_from_form(Left, Module, RecDict, FileLine, NewTypeAcc, NewFormAcc);
contract_from_form([{type, _L1, bounded_fun,
[{type, _L2, 'fun', [_, _]} = Form, Constr]}| Left],
- RecDict, FileLine, TypeAcc, FormAcc) ->
+ Module, RecDict, FileLine, TypeAcc, FormAcc) ->
TypeFun =
fun(ExpTypes, AllRecords) ->
{Constr1, VarDict} =
- process_constraints(Constr, RecDict, ExpTypes, AllRecords),
- Type = erl_types:t_from_form(Form, RecDict, VarDict),
- NewType = erl_types:t_solve_remote(Type, ExpTypes, AllRecords),
+ process_constraints(Constr, Module, RecDict, ExpTypes, AllRecords),
+ NewType = erl_types:t_from_form(Form, ExpTypes, Module, AllRecords,
+ VarDict),
NewTypeNoVars = erl_types:subst_all_vars_to_any(NewType),
{NewTypeNoVars, Constr1}
end,
NewTypeAcc = [TypeFun | TypeAcc],
NewFormAcc = [{Form, Constr} | FormAcc],
- contract_from_form(Left, RecDict, FileLine, NewTypeAcc, NewFormAcc);
-contract_from_form([], _RecDict, _FileLine, TypeAcc, FormAcc) ->
+ contract_from_form(Left, Module, RecDict, FileLine, NewTypeAcc, NewFormAcc);
+contract_from_form([], _Module, _RecDict, _FileLine, TypeAcc, FormAcc) ->
{lists:reverse(TypeAcc), lists:reverse(FormAcc)}.
-process_constraints(Constrs, RecDict, ExpTypes, AllRecords) ->
- Init0 = initialize_constraints(Constrs, RecDict, ExpTypes, AllRecords),
+process_constraints(Constrs, Module, RecDict, ExpTypes, AllRecords) ->
+ Init0 = initialize_constraints(Constrs, Module, RecDict, ExpTypes, AllRecords),
Init = remove_cycles(Init0),
- constraints_fixpoint(Init, RecDict, ExpTypes, AllRecords).
+ constraints_fixpoint(Init, Module, RecDict, ExpTypes, AllRecords).
-initialize_constraints(Constrs, RecDict, ExpTypes, AllRecords) ->
- initialize_constraints(Constrs, RecDict, ExpTypes, AllRecords, []).
+initialize_constraints(Constrs, Module, RecDict, ExpTypes, AllRecords) ->
+ initialize_constraints(Constrs, Module, RecDict, ExpTypes, AllRecords, []).
-initialize_constraints([], _RecDict, _ExpTypes, _AllRecords, Acc) ->
+initialize_constraints([], _Module, _RecDict, _ExpTypes, _AllRecords, Acc) ->
Acc;
-initialize_constraints([Constr|Rest], RecDict, ExpTypes, AllRecords, Acc) ->
+initialize_constraints([Constr|Rest], Module, RecDict, ExpTypes, AllRecords, Acc) ->
case Constr of
{type, _, constraint, [{atom, _, is_subtype}, [Type1, Type2]]} ->
- T1 = final_form(Type1, RecDict, ExpTypes, AllRecords, dict:new()),
+ T1 = final_form(Type1, Module, ExpTypes, AllRecords, dict:new()),
Entry = {T1, Type2},
- initialize_constraints(Rest, RecDict, ExpTypes, AllRecords, [Entry|Acc]);
+ initialize_constraints(Rest, Module, RecDict, ExpTypes, AllRecords, [Entry|Acc]);
{type, _, constraint, [{atom,_,Name}, List]} ->
N = length(List),
throw({error,
io_lib:format("Unsupported type guard ~w/~w\n", [Name, N])})
end.
-constraints_fixpoint(Constrs, RecDict, ExpTypes, AllRecords) ->
+constraints_fixpoint(Constrs, Module, RecDict, ExpTypes, AllRecords) ->
VarDict =
- constraints_to_dict(Constrs, RecDict, ExpTypes, AllRecords, dict:new()),
- constraints_fixpoint(VarDict, Constrs, RecDict, ExpTypes, AllRecords).
+ constraints_to_dict(Constrs, Module, RecDict, ExpTypes, AllRecords, dict:new()),
+ constraints_fixpoint(VarDict, Module, Constrs, RecDict, ExpTypes, AllRecords).
-constraints_fixpoint(OldVarDict, Constrs, RecDict, ExpTypes, AllRecords) ->
+constraints_fixpoint(OldVarDict, Module, Constrs, RecDict, ExpTypes, AllRecords) ->
NewVarDict =
- constraints_to_dict(Constrs, RecDict, ExpTypes, AllRecords, OldVarDict),
+ constraints_to_dict(Constrs, Module, RecDict, ExpTypes, AllRecords, OldVarDict),
case NewVarDict of
OldVarDict ->
DictFold =
@@ -479,25 +480,24 @@ constraints_fixpoint(OldVarDict, Constrs, RecDict, ExpTypes, AllRecords) ->
FinalConstrs = dict:fold(DictFold, [], NewVarDict),
{FinalConstrs, NewVarDict};
_Other ->
- constraints_fixpoint(NewVarDict, Constrs, RecDict, ExpTypes, AllRecords)
+ constraints_fixpoint(NewVarDict, Module, Constrs, RecDict, ExpTypes, AllRecords)
end.
-final_form(Form, RecDict, ExpTypes, AllRecords, VarDict) ->
- T1 = erl_types:t_from_form(Form, RecDict, VarDict),
- erl_types:t_solve_remote(T1, ExpTypes, AllRecords).
+final_form(Form, Module, ExpTypes, AllRecords, VarDict) ->
+ erl_types:t_from_form(Form, ExpTypes, Module, AllRecords, VarDict).
-constraints_to_dict(Constrs, RecDict, ExpTypes, AllRecords, VarDict) ->
+constraints_to_dict(Constrs, Module, RecDict, ExpTypes, AllRecords, VarDict) ->
Subtypes =
- constraints_to_subs(Constrs, RecDict, ExpTypes, AllRecords, VarDict, []),
+ constraints_to_subs(Constrs, Module, RecDict, ExpTypes, AllRecords, VarDict, []),
insert_constraints(Subtypes, dict:new()).
-constraints_to_subs([], _RecDict, _ExpTypes, _AllRecords, _VarDict, Acc) ->
+constraints_to_subs([], _Module, _RecDict, _ExpTypes, _AllRecords, _VarDict, Acc) ->
Acc;
-constraints_to_subs([C|Rest], RecDict, ExpTypes, AllRecords, VarDict, Acc) ->
+constraints_to_subs([C|Rest], Module, RecDict, ExpTypes, AllRecords, VarDict, Acc) ->
{T1, Form2} = C,
- T2 = final_form(Form2, RecDict, ExpTypes, AllRecords, VarDict),
+ T2 = final_form(Form2, Module, ExpTypes, AllRecords, VarDict),
NewAcc = [{subtype, T1, T2}|Acc],
- constraints_to_subs(Rest, RecDict, ExpTypes, AllRecords, VarDict, NewAcc).
+ constraints_to_subs(Rest, Module, RecDict, ExpTypes, AllRecords, VarDict, NewAcc).
%% Replaces variables with '_' when necessary to break up cycles among
%% the constraints.
@@ -585,7 +585,7 @@ general_domain([], AccSig) ->
-spec get_invalid_contract_warnings([module()],
dialyzer_codeserver:codeserver(),
dialyzer_plt:plt(),
- opaques_fun()) -> [dial_warning()].
+ opaques_fun()) -> [raw_warning()].
get_invalid_contract_warnings(Modules, CodeServer, Plt, FindOpaques) ->
get_invalid_contract_warnings_modules(Modules, CodeServer, Plt, FindOpaques, []).
@@ -609,12 +609,14 @@ get_invalid_contract_warnings_funs([{MFA, {FileLine, Contract, _Xtra}}|Left],
Sig = erl_types:t_fun(Args, Ret),
{M, _F, _A} = MFA,
Opaques = FindOpaques(M),
+ {File, Line} = FileLine,
+ WarningInfo = {File, Line, MFA},
NewAcc =
case check_contract(Contract, Sig, Opaques) of
{error, invalid_contract} ->
- [invalid_contract_warning(MFA, FileLine, Sig, RecDict)|Acc];
+ [invalid_contract_warning(MFA, WarningInfo, Sig, RecDict)|Acc];
{error, {overlapping_contract, []}} ->
- [overlapping_contract_warning(MFA, FileLine)|Acc];
+ [overlapping_contract_warning(MFA, WarningInfo)|Acc];
{error, {extra_range, ExtraRanges, STRange}} ->
Warn =
case t_from_forms_without_remote(Contract#contract.forms,
@@ -627,12 +629,12 @@ get_invalid_contract_warnings_funs([{MFA, {FileLine, Contract, _Xtra}}|Left],
end,
case Warn of
true ->
- [extra_range_warning(MFA, FileLine, ExtraRanges, STRange)|Acc];
+ [extra_range_warning(MFA, WarningInfo, ExtraRanges, STRange)|Acc];
false ->
Acc
end;
{error, Msg} ->
- [{?WARN_CONTRACT_SYNTAX, FileLine, Msg}|Acc];
+ [{?WARN_CONTRACT_SYNTAX, WarningInfo, Msg}|Acc];
ok ->
{M, F, A} = MFA,
CSig0 = get_contract_signature(Contract),
@@ -646,14 +648,14 @@ get_invalid_contract_warnings_funs([{MFA, {FileLine, Contract, _Xtra}}|Left],
BifSig = erl_types:t_fun(BifArgs, BifRet),
case check_contract(Contract, BifSig, Opaques) of
{error, _} ->
- [invalid_contract_warning(MFA, FileLine, BifSig, RecDict)
+ [invalid_contract_warning(MFA, WarningInfo, BifSig, RecDict)
|Acc];
ok ->
- picky_contract_check(CSig, BifSig, MFA, FileLine,
+ picky_contract_check(CSig, BifSig, MFA, WarningInfo,
Contract, RecDict, Acc)
end;
false ->
- picky_contract_check(CSig, Sig, MFA, FileLine, Contract,
+ picky_contract_check(CSig, Sig, MFA, WarningInfo, Contract,
RecDict, Acc)
end
end,
@@ -662,20 +664,20 @@ get_invalid_contract_warnings_funs([{MFA, {FileLine, Contract, _Xtra}}|Left],
get_invalid_contract_warnings_funs([], _Plt, _RecDict, _FindOpaques, Acc) ->
Acc.
-invalid_contract_warning({M, F, A}, FileLine, SuccType, RecDict) ->
+invalid_contract_warning({M, F, A}, WarningInfo, SuccType, RecDict) ->
SuccTypeStr = dialyzer_utils:format_sig(SuccType, RecDict),
- {?WARN_CONTRACT_TYPES, FileLine, {invalid_contract, [M, F, A, SuccTypeStr]}}.
+ {?WARN_CONTRACT_TYPES, WarningInfo, {invalid_contract, [M, F, A, SuccTypeStr]}}.
-overlapping_contract_warning({M, F, A}, FileLine) ->
- {?WARN_CONTRACT_TYPES, FileLine, {overlapping_contract, [M, F, A]}}.
+overlapping_contract_warning({M, F, A}, WarningInfo) ->
+ {?WARN_CONTRACT_TYPES, WarningInfo, {overlapping_contract, [M, F, A]}}.
-extra_range_warning({M, F, A}, FileLine, ExtraRanges, STRange) ->
+extra_range_warning({M, F, A}, WarningInfo, ExtraRanges, STRange) ->
ERangesStr = erl_types:t_to_string(ExtraRanges),
STRangeStr = erl_types:t_to_string(STRange),
- {?WARN_CONTRACT_SUPERTYPE, FileLine,
+ {?WARN_CONTRACT_SUPERTYPE, WarningInfo,
{extra_range, [M, F, A, ERangesStr, STRangeStr]}}.
-picky_contract_check(CSig0, Sig0, MFA, FileLine, Contract, RecDict, Acc) ->
+picky_contract_check(CSig0, Sig0, MFA, WarningInfo, Contract, RecDict, Acc) ->
CSig = erl_types:t_abstract_records(CSig0, RecDict),
Sig = erl_types:t_abstract_records(Sig0, RecDict),
case erl_types:t_is_equal(CSig, Sig) of
@@ -685,7 +687,7 @@ picky_contract_check(CSig0, Sig0, MFA, FileLine, Contract, RecDict, Acc) ->
erl_types:t_is_unit(erl_types:t_fun_range(CSig))) of
true -> Acc;
false ->
- case extra_contract_warning(MFA, FileLine, Contract,
+ case extra_contract_warning(MFA, WarningInfo, Contract,
CSig0, Sig0, RecDict) of
no_warning -> Acc;
{warning, Warning} -> [Warning|Acc]
@@ -693,7 +695,7 @@ picky_contract_check(CSig0, Sig0, MFA, FileLine, Contract, RecDict, Acc) ->
end
end.
-extra_contract_warning({M, F, A}, FileLine, Contract, CSig, Sig, RecDict) ->
+extra_contract_warning({M, F, A}, WarningInfo, Contract, CSig, Sig, RecDict) ->
%% We do not want to depend upon erl_types:t_to_string() possibly
%% hiding the contents of opaque types.
SigUnopaque = erl_types:t_unopaque(Sig),
@@ -724,7 +726,7 @@ extra_contract_warning({M, F, A}, FileLine, Contract, CSig, Sig, RecDict) ->
{?WARN_CONTRACT_NOT_EQUAL,
{contract_diff, [M, F, A, ContractString, SigString]}}
end,
- {warning, {Tag, FileLine, Msg}}
+ {warning, {Tag, WarningInfo, Msg}}
end.
is_remote_types_related(Contract, CSig, Sig, RecDict) ->
@@ -751,8 +753,7 @@ is_remote_types_related(Contract, CSig, Sig, RecDict) ->
end.
t_from_forms_without_remote([{FType, []}], RecDict) ->
- Type0 = erl_types:t_from_form(FType, RecDict),
- Type1 = erl_types:subst_all_remote(Type0, erl_types:t_none()),
+ Type1 = erl_types:t_from_form_without_remote(FType, RecDict),
{ok, erl_types:subst_all_vars_to_any(Type1)};
t_from_forms_without_remote([{_FType, _Constrs}], _RecDict) ->
%% 'When' constraints
diff --git a/lib/dialyzer/src/dialyzer_dataflow.erl b/lib/dialyzer/src/dialyzer_dataflow.erl
index 46467a1303..336b4641d4 100644
--- a/lib/dialyzer/src/dialyzer_dataflow.erl
+++ b/lib/dialyzer/src/dialyzer_dataflow.erl
@@ -2,7 +2,7 @@
%%--------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -28,14 +28,15 @@
-module(dialyzer_dataflow).
--export([get_fun_types/4, get_warnings/5, format_args/3]).
+-export([get_fun_types/5, get_warnings/5, format_args/3]).
%% Data structure interfaces.
-export([state__add_warning/2, state__cleanup/1,
state__duplicate/1, dispose_state/1,
state__get_callgraph/1, state__get_races/1,
state__get_records/1, state__put_callgraph/2,
- state__put_races/2, state__records_only/1]).
+ state__put_races/2, state__records_only/1,
+ state__find_function/2]).
-export_type([state/0]).
@@ -89,6 +90,8 @@
-type type() :: erl_types:erl_type().
-type types() :: erl_types:type_table().
+-type curr_fun() :: 'undefined' | 'top' | mfa_or_funlbl().
+
-define(no_arg, no_arg).
-define(TYPE_LIMIT, 3).
@@ -96,17 +99,20 @@
-define(BITS, 128).
-record(state, {callgraph :: dialyzer_callgraph:callgraph(),
+ codeserver :: dialyzer_codeserver:codeserver(),
envs :: env_tab(),
fun_tab :: fun_tab(),
+ fun_homes :: dict:dict(label(), mfa()),
plt :: dialyzer_plt:plt(),
opaques :: [type()],
races = dialyzer_races:new() :: dialyzer_races:races(),
records = dict:new() :: types(),
tree_map :: dict:dict(label(), cerl:cerl()),
warning_mode = false :: boolean(),
- warnings = [] :: [dial_warning()],
+ warnings = [] :: [raw_warning()],
work :: {[_], [_], sets:set()},
- module :: module()
+ module :: module(),
+ curr_fun :: curr_fun()
}).
-record(map, {dict = dict:new() :: type_tab(),
@@ -115,7 +121,6 @@
modified_stack = [] :: [{[Key :: term()],reference()}],
ref = undefined :: reference() | undefined}).
--type nowarn() :: dialyzer_analysis_callgraph:no_warn_unused().
-type env_tab() :: dict:dict(label(), #map{}).
-type fun_entry() :: {Args :: [type()], RetType :: type()}.
-type fun_tab() :: dict:dict('top' | label(),
@@ -133,22 +138,24 @@
-type fun_types() :: dict:dict(label(), type()).
-spec get_warnings(cerl:c_module(), dialyzer_plt:plt(),
- dialyzer_callgraph:callgraph(), types(), nowarn()) ->
- {[dial_warning()], fun_types()}.
-
-get_warnings(Tree, Plt, Callgraph, Records, NoWarnUnused) ->
- State1 = analyze_module(Tree, Plt, Callgraph, Records, true),
- State2 =
- state__renew_warnings(state__get_warnings(State1, NoWarnUnused), State1),
+ dialyzer_callgraph:callgraph(),
+ dialyzer_codeserver:codeserver(),
+ types()) ->
+ {[raw_warning()], fun_types()}.
+
+get_warnings(Tree, Plt, Callgraph, Codeserver, Records) ->
+ State1 = analyze_module(Tree, Plt, Callgraph, Codeserver, Records, true),
+ State2 = state__renew_warnings(state__get_warnings(State1), State1),
State3 = state__get_race_warnings(State2),
{State3#state.warnings, state__all_fun_types(State3)}.
-spec get_fun_types(cerl:c_module(), dialyzer_plt:plt(),
dialyzer_callgraph:callgraph(),
+ dialyzer_codeserver:codeserver(),
types()) -> fun_types().
-get_fun_types(Tree, Plt, Callgraph, Records) ->
- State = analyze_module(Tree, Plt, Callgraph, Records, false),
+get_fun_types(Tree, Plt, Callgraph, Codeserver, Records) ->
+ State = analyze_module(Tree, Plt, Callgraph, Codeserver, Records, false),
state__all_fun_types(State).
%%% ===========================================================================
@@ -157,11 +164,11 @@ get_fun_types(Tree, Plt, Callgraph, Records) ->
%%%
%%% ===========================================================================
-analyze_module(Tree, Plt, Callgraph, Records, GetWarnings) ->
+analyze_module(Tree, Plt, Callgraph, Codeserver, Records, GetWarnings) ->
debug_pp(Tree, false),
Module = cerl:atom_val(cerl:module_name(Tree)),
TopFun = cerl:ann_c_fun([{label, top}], [], Tree),
- State = state__new(Callgraph, TopFun, Plt, Module, Records),
+ State = state__new(Callgraph, Codeserver, TopFun, Plt, Module, Records),
State1 = state__race_analysis(not GetWarnings, State),
State2 = analyze_loop(State1),
case GetWarnings of
@@ -175,25 +182,26 @@ analyze_module(Tree, Plt, Callgraph, Records, GetWarnings) ->
analyze_loop(State) ->
case state__get_work(State) of
- none -> State;
- {Fun, NewState1} ->
+ none -> state__set_curr_fun(undefined, State);
+ {Fun, NewState0} ->
+ NewState1 = state__set_curr_fun(get_label(Fun), NewState0),
{ArgTypes, IsCalled} = state__get_args_and_status(Fun, NewState1),
case not IsCalled of
true ->
?debug("Not handling (not called) ~w: ~s\n",
- [state__lookup_name(get_label(Fun), State),
+ [NewState1#state.curr_fun,
t_to_string(t_product(ArgTypes))]),
analyze_loop(NewState1);
false ->
case state__fun_env(Fun, NewState1) of
none ->
?debug("Not handling (no env) ~w: ~s\n",
- [state__lookup_name(get_label(Fun), State),
+ [NewState1#state.curr_fun,
t_to_string(t_product(ArgTypes))]),
analyze_loop(NewState1);
Map ->
?debug("Handling fun ~p: ~s\n",
- [state__lookup_name(get_label(Fun), State),
+ [NewState1#state.curr_fun,
t_to_string(state__fun_type(Fun, NewState1))]),
Vars = cerl:fun_vars(Fun),
Map1 = enter_type_lists(Vars, ArgTypes, Map),
@@ -212,7 +220,7 @@ analyze_loop(State) ->
{NewState4, _Map2, BodyType} =
traverse(Body, Map1, NewState3),
?debug("Done analyzing: ~w:~s\n",
- [state__lookup_name(get_label(Fun), State),
+ [NewState1#state.curr_fun,
t_to_string(t_fun(ArgTypes, BodyType))]),
NewState5 =
case IsRaceAnalysisEnabled of
@@ -2780,9 +2788,9 @@ filter_match_fail([]) ->
%%%
%%% ===========================================================================
-state__new(Callgraph, Tree, Plt, Module, Records) ->
+state__new(Callgraph, Codeserver, Tree, Plt, Module, Records) ->
Opaques = erl_types:t_opaque_from_records(Records),
- TreeMap = build_tree_map(Tree),
+ {TreeMap, FunHomes} = build_tree_map(Tree, Callgraph),
Funs = dict:fetch_keys(TreeMap),
FunTab = init_fun_tab(Funs, dict:new(), TreeMap, Callgraph, Plt),
ExportedFuns =
@@ -2790,7 +2798,8 @@ state__new(Callgraph, Tree, Plt, Module, Records) ->
Work = init_work(ExportedFuns),
Env = lists:foldl(fun(Fun, Env) -> dict:store(Fun, map__new(), Env) end,
dict:new(), Funs),
- #state{callgraph = Callgraph, envs = Env, fun_tab = FunTab, opaques = Opaques,
+ #state{callgraph = Callgraph, codeserver = Codeserver,
+ envs = Env, fun_tab = FunTab, fun_homes = FunHomes, opaques = Opaques,
plt = Plt, races = dialyzer_races:new(), records = Records,
warning_mode = false, warnings = [], work = Work, tree_map = TreeMap,
module = Module}.
@@ -2829,7 +2838,7 @@ state__renew_race_list(RaceList, RaceListSize,
state__renew_warnings(Warnings, State) ->
State#state{warnings = Warnings}.
--spec state__add_warning(dial_warning(), state()) -> state().
+-spec state__add_warning(raw_warning(), state()) -> state().
state__add_warning(Warn, #state{warnings = Warnings} = State) ->
State#state{warnings = [Warn|Warnings]}.
@@ -2844,29 +2853,45 @@ state__add_warning(#state{warnings = Warnings, warning_mode = true} = State,
Ann = cerl:get_ann(Tree),
case Force of
true ->
- Warn = {Tag, {get_file(Ann), abs(get_line(Ann))}, Msg},
+ WarningInfo = {get_file(Ann),
+ abs(get_line(Ann)),
+ State#state.curr_fun},
+ Warn = {Tag, WarningInfo, Msg},
?debug("MSG ~s\n", [dialyzer:format_warning(Warn)]),
State#state{warnings = [Warn|Warnings]};
false ->
case is_compiler_generated(Ann) of
- true -> State;
- false ->
- Warn = {Tag, {get_file(Ann), get_line(Ann)}, Msg},
+ true -> State;
+ false ->
+ WarningInfo = {get_file(Ann), get_line(Ann), State#state.curr_fun},
+ Warn = {Tag, WarningInfo, Msg},
?debug("MSG ~s\n", [dialyzer:format_warning(Warn)]),
- State#state{warnings = [Warn|Warnings]}
+ State#state{warnings = [Warn|Warnings]}
end
end.
+-spec state__set_curr_fun(curr_fun(), state()) -> state().
+
+state__set_curr_fun(undefined, State) ->
+ State#state{curr_fun = undefined};
+state__set_curr_fun(FunLbl, State) ->
+ State#state{curr_fun = find_function(FunLbl, State)}.
+
+-spec state__find_function(mfa_or_funlbl(), state()) -> mfa_or_funlbl().
+
+state__find_function(FunLbl, State) ->
+ find_function(FunLbl, State).
+
state__get_race_warnings(#state{races = Races} = State) ->
{Races1, State1} = dialyzer_races:get_race_warnings(Races, State),
State1#state{races = Races1}.
state__get_warnings(#state{tree_map = TreeMap, fun_tab = FunTab,
- callgraph = Callgraph, plt = Plt} = State,
- NoWarnUnused) ->
+ callgraph = Callgraph, plt = Plt} = State) ->
FoldFun =
fun({top, _}, AccState) -> AccState;
({FunLbl, Fun}, AccState) ->
+ AccState1 = state__set_curr_fun(FunLbl, AccState),
{NotCalled, Ret} =
case dict:fetch(get_label(Fun), FunTab) of
{not_handled, {_Args0, Ret0}} -> {true, Ret0};
@@ -2874,17 +2899,12 @@ state__get_warnings(#state{tree_map = TreeMap, fun_tab = FunTab,
end,
case NotCalled of
true ->
- {Warn, Msg} =
- case dialyzer_callgraph:lookup_name(FunLbl, Callgraph) of
- error -> {false, {}};
- {ok, {_M, F, A} = MFA} ->
- {not sets:is_element(MFA, NoWarnUnused),
- {unused_fun, [F, A]}}
- end,
- case Warn of
- true -> state__add_warning(AccState, ?WARN_NOT_CALLED, Fun, Msg);
- false -> AccState
- end;
+ case dialyzer_callgraph:lookup_name(FunLbl, Callgraph) of
+ error -> AccState1;
+ {ok, {_M, F, A}} ->
+ Msg = {unused_fun, [F, A]},
+ state__add_warning(AccState1, ?WARN_NOT_CALLED, Fun, Msg)
+ end;
false ->
{Name, Contract} =
case dialyzer_callgraph:lookup_name(FunLbl, Callgraph) of
@@ -2897,7 +2917,7 @@ state__get_warnings(#state{tree_map = TreeMap, fun_tab = FunTab,
%% Check if the function has a contract that allows this.
Warn =
case Contract of
- none -> not parent_allows_this(FunLbl, State);
+ none -> not parent_allows_this(FunLbl, AccState1);
{value, C} ->
GenRet = dialyzer_contracts:get_contract_return(C),
not t_is_unit(GenRet)
@@ -2907,19 +2927,19 @@ state__get_warnings(#state{tree_map = TreeMap, fun_tab = FunTab,
case classify_returns(Fun) of
no_match ->
Msg = {no_return, [no_match|Name]},
- state__add_warning(AccState, ?WARN_RETURN_NO_RETURN,
+ state__add_warning(AccState1, ?WARN_RETURN_NO_RETURN,
Fun, Msg);
only_explicit ->
Msg = {no_return, [only_explicit|Name]},
- state__add_warning(AccState, ?WARN_RETURN_ONLY_EXIT,
+ state__add_warning(AccState1, ?WARN_RETURN_ONLY_EXIT,
Fun, Msg);
only_normal ->
Msg = {no_return, [only_normal|Name]},
- state__add_warning(AccState, ?WARN_RETURN_NO_RETURN,
+ state__add_warning(AccState1, ?WARN_RETURN_NO_RETURN,
Fun, Msg);
both ->
Msg = {no_return, [both|Name]},
- state__add_warning(AccState, ?WARN_RETURN_NO_RETURN,
+ state__add_warning(AccState1, ?WARN_RETURN_NO_RETURN,
Fun, Msg)
end;
false ->
@@ -2957,8 +2977,10 @@ state__lookup_name(Fun, #state{callgraph = Callgraph}) ->
state__lookup_record(Tag, Arity, #state{records = Records}) ->
case erl_types:lookup_record(Tag, Arity, Records) of
{ok, Fields} ->
- {ok, t_tuple([t_atom(Tag)|
- [FieldType || {_FieldName, FieldType} <- Fields]])};
+ RecType =
+ t_tuple([t_atom(Tag)|
+ [FieldType || {_FieldName, _Abstr, FieldType} <- Fields]]),
+ {ok, RecType};
error ->
error
end.
@@ -2970,17 +2992,31 @@ state__get_args_and_status(Tree, #state{fun_tab = FunTab}) ->
{ok, {ArgTypes, _}} -> {ArgTypes, true}
end.
-build_tree_map(Tree) ->
+build_tree_map(Tree, Callgraph) ->
Fun =
- fun(T, Dict) ->
+ fun(T, {Dict, Homes, FunLbls} = Acc) ->
case cerl:is_c_fun(T) of
true ->
- dict:store(get_label(T), T, Dict);
+ FunLbl = get_label(T),
+ Dict1 = dict:store(FunLbl, T, Dict),
+ case catch dialyzer_callgraph:lookup_name(FunLbl, Callgraph) of
+ {ok, MFA} ->
+ F2 =
+ fun(Lbl, Dict0) ->
+ dict:store(Lbl, MFA, Dict0)
+ end,
+ Homes1 = lists:foldl(F2, Homes, [FunLbl|FunLbls]),
+ {Dict1, Homes1, []};
+ _ ->
+ {Dict1, Homes, [FunLbl|FunLbls]}
+ end;
false ->
- Dict
+ Acc
end
end,
- cerl_trees:fold(Fun, dict:new(), Tree).
+ Dict0 = dict:new(),
+ {Dict, Homes, _} = cerl_trees:fold(Fun, {Dict0, Dict0, []}, Tree),
+ {Dict, Homes}.
init_fun_tab([top|Left], Dict, TreeMap, Callgraph, Plt) ->
NewDict = dict:store(top, {[], t_none()}, Dict),
@@ -3438,6 +3474,13 @@ parent_allows_this(FunLbl, #state{callgraph = Callgraph, plt = Plt} =State) ->
end
end.
+find_function({_, _, _} = MFA, _State) ->
+ MFA;
+find_function(top, _State) ->
+ top;
+find_function(FunLbl, #state{fun_homes = Homes}) ->
+ dict:fetch(FunLbl, Homes).
+
classify_returns(Tree) ->
case find_terminals(cerl:fun_body(Tree)) of
{false, false} -> no_match;
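
The new build_tree_map/2 above records a "home" MFA for every fun label: labels of anonymous funs that the callgraph cannot resolve are buffered and assigned to the enclosing function's MFA once that label is reached. Below is a rough, self-contained model of that bookkeeping; the list-of-pairs input is invented and stands in for the cerl tree fold plus the dialyzer_callgraph:lookup_name/2 lookups in the real code.

%% Illustrative sketch, not part of the patch.
-module(fun_homes_sketch).
-export([assign/1]).

%% Labels is a list of {Label, {ok, MFA} | error} pairs. Unresolved
%% labels are buffered and attributed to the next MFA that turns up,
%% mirroring the {Dict, Homes, FunLbls} accumulator in build_tree_map/2.
assign(Labels) ->
    F = fun({Lbl, {ok, MFA}}, {Homes, Pending}) ->
                Store = fun(L, D) -> dict:store(L, MFA, D) end,
                {lists:foldl(Store, Homes, [Lbl | Pending]), []};
           ({Lbl, error}, {Homes, Pending}) ->
                {Homes, [Lbl | Pending]}
        end,
    {Homes, _Pending} = lists:foldl(F, {dict:new(), []}, Labels),
    dict:to_list(Homes).
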
diff --git a/lib/dialyzer/src/dialyzer_options.erl b/lib/dialyzer/src/dialyzer_options.erl
index a92b8b1958..20971f1407 100644
--- a/lib/dialyzer/src/dialyzer_options.erl
+++ b/lib/dialyzer/src/dialyzer_options.erl
@@ -28,7 +28,7 @@
-module(dialyzer_options).
--export([build/1]).
+-export([build/1, build_warnings/2]).
-include("dialyzer.hrl").
@@ -270,7 +270,7 @@ assert_solvers([v2|Terms]) ->
assert_solvers([Term|_]) ->
bad_option("Illegal value for solver", Term).
--spec build_warnings([atom()], [dial_warning()]) -> [dial_warning()].
+-spec build_warnings([atom()], dial_warn_tags()) -> dial_warn_tags().
build_warnings([Opt|Opts], Warnings) ->
NewWarnings =
diff --git a/lib/dialyzer/src/dialyzer_races.erl b/lib/dialyzer/src/dialyzer_races.erl
index 2a8aba5d8f..48eb331239 100644
--- a/lib/dialyzer/src/dialyzer_races.erl
+++ b/lib/dialyzer/src/dialyzer_races.erl
@@ -85,9 +85,9 @@
-type race_tag() :: 'whereis_register' | 'whereis_unregister'
| 'ets_lookup_insert' | 'mnesia_dirty_read_write'.
-%% The following type is similar to the dial_warning() type but has a
+%% The following type is similar to the raw_warning() type but has a
%% tag which is local to this module and is not propagated to outside
--type dial_race_warning() :: {race_warn_tag(), file_line(), {atom(), [term()]}}.
+-type dial_race_warning() :: {race_warn_tag(), warning_info(), {atom(), [term()]}}.
-type race_warn_tag() :: ?WARN_WHEREIS_REGISTER | ?WARN_WHEREIS_UNREGISTER
| ?WARN_ETS_LOOKUP_INSERT | ?WARN_MNESIA_DIRTY_READ_WRITE.
@@ -312,10 +312,13 @@ race(State) ->
DepList = fixup_race_list(RaceWarnTag, VarArgs, State1),
{State2, RaceWarn} =
get_race_warn(Fun, Args, ArgTypes, DepList, State),
+ {File, Line} = FileLine,
+ CurrMFA = dialyzer_dataflow:state__find_function(CurrFun, State),
+ WarningInfo = {File, Line, CurrMFA},
race(
state__add_race_warning(
state__renew_race_tags(T, State2), RaceWarn, RaceWarnTag,
- FileLine))
+ WarningInfo))
end,
state__renew_race_tags([], RetState).
@@ -2324,7 +2327,7 @@ get_race_warnings_helper(Warnings, State) ->
[] ->
{dialyzer_dataflow:state__get_races(State), State};
[H|T] ->
- {RaceWarnTag, FileLine, {race_condition, [M, F, A, AT, S, DepList]}} = H,
+ {RaceWarnTag, WarningInfo, {race_condition, [M, F, A, AT, S, DepList]}} = H,
Reason =
case RaceWarnTag of
?WARN_WHEREIS_REGISTER ->
@@ -2347,7 +2350,7 @@ get_race_warnings_helper(Warnings, State) ->
"caused by its combination with ")
end,
W =
- {?WARN_RACE_CONDITION, FileLine,
+ {?WARN_RACE_CONDITION, WarningInfo,
{race_condition,
[M, F, dialyzer_dataflow:format_args(A, AT, S), Reason]}},
get_race_warnings_helper(T,
@@ -2377,12 +2380,12 @@ get_reason(DependencyList, Reason) ->
end
end.
-state__add_race_warning(State, RaceWarn, RaceWarnTag, FileLine) ->
+state__add_race_warning(State, RaceWarn, RaceWarnTag, WarningInfo) ->
case RaceWarn of
no_race -> State;
_Else ->
Races = dialyzer_dataflow:state__get_races(State),
- Warn = {RaceWarnTag, FileLine, RaceWarn},
+ Warn = {RaceWarnTag, WarningInfo, RaceWarn},
dialyzer_dataflow:state__put_races(add_race_warning(Warn, Races), State)
end.
diff --git a/lib/dialyzer/src/dialyzer_succ_typings.erl b/lib/dialyzer/src/dialyzer_succ_typings.erl
index 6dc4285194..7ceb19e30a 100644
--- a/lib/dialyzer/src/dialyzer_succ_typings.erl
+++ b/lib/dialyzer/src/dialyzer_succ_typings.erl
@@ -29,7 +29,7 @@
-export([analyze_callgraph/3,
analyze_callgraph/6,
- get_warnings/8
+ get_warnings/7
]).
-export([
@@ -69,10 +69,8 @@
-type scc() :: [mfa_or_funlbl()] | [module()].
-
-record(st, {callgraph :: dialyzer_callgraph:callgraph(),
codeserver :: dialyzer_codeserver:codeserver(),
- no_warn_unused :: sets:set(mfa()),
parent = none :: parent(),
timing_server :: dialyzer_timing:timing_server(),
solvers :: [solver()],
@@ -137,18 +135,17 @@ get_refined_success_typings(SCCs, #st{callgraph = Callgraph,
-type doc_plt() :: 'undefined' | dialyzer_plt:plt().
-spec get_warnings(dialyzer_callgraph:callgraph(), dialyzer_plt:plt(),
- doc_plt(), dialyzer_codeserver:codeserver(), sets:set(mfa()),
+ doc_plt(), dialyzer_codeserver:codeserver(),
dialyzer_timing:timing_server(), [solver()], pid()) ->
- {[dial_warning()], dialyzer_plt:plt(), doc_plt()}.
+ {[raw_warning()], dialyzer_plt:plt(), doc_plt()}.
get_warnings(Callgraph, Plt, DocPlt, Codeserver,
- NoWarnUnused, TimingServer, Solvers, Parent) ->
+ TimingServer, Solvers, Parent) ->
InitState =
init_state_and_get_success_typings(Callgraph, Plt, Codeserver,
TimingServer, Solvers, Parent),
- NewState = InitState#st{no_warn_unused = NoWarnUnused},
- Mods = dialyzer_callgraph:modules(NewState#st.callgraph),
- MiniPlt = NewState#st.plt,
+ Mods = dialyzer_callgraph:modules(InitState#st.callgraph),
+ MiniPlt = InitState#st.plt,
FindOpaques = lookup_and_find_opaques_fun(Codeserver),
CWarns =
dialyzer_contracts:get_invalid_contract_warnings(Mods, Codeserver,
@@ -156,31 +153,30 @@ get_warnings(Callgraph, Plt, DocPlt, Codeserver,
MiniDocPlt = dialyzer_plt:get_mini_plt(DocPlt),
ModWarns =
?timing(TimingServer, "warning",
- get_warnings_from_modules(Mods, NewState, MiniDocPlt)),
+ get_warnings_from_modules(Mods, InitState, MiniDocPlt)),
{postprocess_warnings(CWarns ++ ModWarns, Codeserver),
dialyzer_plt:restore_full_plt(MiniPlt, Plt),
dialyzer_plt:restore_full_plt(MiniDocPlt, DocPlt)}.
get_warnings_from_modules(Mods, State, DocPlt) ->
#st{callgraph = Callgraph, codeserver = Codeserver,
- no_warn_unused = NoWarnUnused, plt = Plt,
- timing_server = TimingServer} = State,
- Init = {Codeserver, Callgraph, NoWarnUnused, Plt, DocPlt},
+ plt = Plt, timing_server = TimingServer} = State,
+ Init = {Codeserver, Callgraph, Plt, DocPlt},
dialyzer_coordinator:parallel_job(warnings, Mods, Init, TimingServer).
--spec collect_warnings(module(), warnings_init_data()) -> [dial_warning()].
+-spec collect_warnings(module(), warnings_init_data()) -> [raw_warning()].
-collect_warnings(M, {Codeserver, Callgraph, NoWarnUnused, Plt, DocPlt}) ->
+collect_warnings(M, {Codeserver, Callgraph, Plt, DocPlt}) ->
ModCode = dialyzer_codeserver:lookup_mod_code(M, Codeserver),
Records = dialyzer_codeserver:lookup_mod_records(M, Codeserver),
Contracts = dialyzer_codeserver:lookup_mod_contracts(M, Codeserver),
AllFuns = collect_fun_info([ModCode]),
%% Check if there are contracts for functions that do not exist
- Warnings1 =
+ Warnings1 =
dialyzer_contracts:contracts_without_fun(Contracts, AllFuns, Callgraph),
{Warnings2, FunTypes} =
- dialyzer_dataflow:get_warnings(ModCode, Plt, Callgraph,
- Records, NoWarnUnused),
+ dialyzer_dataflow:get_warnings(ModCode, Plt, Callgraph, Codeserver,
+ Records),
Attrs = cerl:module_attrs(ModCode),
Warnings3 =
dialyzer_behaviours:check_callbacks(M, Attrs, Records, Plt, Codeserver),
@@ -197,17 +193,19 @@ postprocess_warnings(RawWarnings, Codeserver) ->
postprocess_dataflow_warns([], _Callgraph, WAcc, Acc) ->
lists:reverse(Acc, WAcc);
-postprocess_dataflow_warns([{?WARN_CONTRACT_RANGE, {CallF, CallL}, Msg}|Rest],
+postprocess_dataflow_warns([{?WARN_CONTRACT_RANGE, WarningInfo, Msg}|Rest],
Codeserver, WAcc, Acc) ->
+ {CallF, CallL, _CallMFA} = WarningInfo,
{contract_range, [Contract, M, F, A, ArgStrings, CRet]} = Msg,
case dialyzer_codeserver:lookup_mfa_contract({M,F,A}, Codeserver) of
- {ok, {{ContrF, _ContrL} = FileLine, _C, _X}} ->
+ {ok, {{ContrF, ContrL}, _C, _X}} ->
case CallF =:= ContrF of
true ->
NewMsg = {contract_range, [Contract, M, F, ArgStrings, CallL, CRet]},
- W = {?WARN_CONTRACT_RANGE, FileLine, NewMsg},
+ WarningInfo2 = {ContrF, ContrL, {M, F, A}},
+ W = {?WARN_CONTRACT_RANGE, WarningInfo2, NewMsg},
Filter =
- fun({?WARN_CONTRACT_TYPES, FL, _}) when FL =:= FileLine -> false;
+ fun({?WARN_CONTRACT_TYPES, WI, _}) when WI =:= WarningInfo2 -> false;
(_) -> true
end,
FilterWAcc = lists:filter(Filter, WAcc),
@@ -219,7 +217,7 @@ postprocess_dataflow_warns([{?WARN_CONTRACT_RANGE, {CallF, CallL}, Msg}|Rest],
%% The contract is not in a module that is currently under analysis.
%% We display the warning in the file/line of the call.
NewMsg = {contract_range, [Contract, M, F, ArgStrings, CallL, CRet]},
- W = {?WARN_CONTRACT_RANGE, {CallF, CallL}, NewMsg},
+ W = {?WARN_CONTRACT_RANGE, WarningInfo, NewMsg},
postprocess_dataflow_warns(Rest, Codeserver, WAcc, [W|Acc])
end.
@@ -262,7 +260,7 @@ refine_one_module(M, {CodeServer, Callgraph, Plt, _Solvers}) ->
Records = dialyzer_codeserver:lookup_mod_records(M, CodeServer),
FunTypes = get_fun_types_from_plt(AllFuns, Callgraph, Plt),
NewFunTypes =
- dialyzer_dataflow:get_fun_types(ModCode, Plt, Callgraph, Records),
+ dialyzer_dataflow:get_fun_types(ModCode, Plt, Callgraph, CodeServer, Records),
Contracts1 = dialyzer_codeserver:lookup_mod_contracts(M, CodeServer),
Contracts = orddict:from_list(dict:to_list(Contracts1)),
FindOpaques = find_opaques_fun(Records),
diff --git a/lib/dialyzer/src/dialyzer_typesig.erl b/lib/dialyzer/src/dialyzer_typesig.erl
index 3d03ed3ab3..1737bfd3a9 100644
--- a/lib/dialyzer/src/dialyzer_typesig.erl
+++ b/lib/dialyzer/src/dialyzer_typesig.erl
@@ -2,7 +2,7 @@
%%-----------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -3264,7 +3264,7 @@ lookup_record(Records, Tag, Arity) ->
{ok, Fields} ->
RecType =
t_tuple([t_from_term(Tag)|
- [FieldType || {_FieldName, FieldType} <- Fields]]),
+ [FieldType || {_FieldName, _Abstr, FieldType} <- Fields]]),
{ok, RecType};
error ->
error
@@ -3275,7 +3275,7 @@ is_literal_record(Tree) ->
lists:member(record, Ann).
family(L) ->
- sofs:to_external(sofs:rel2fam(sofs:relation(L))).
+ dialyzer_utils:family(L).
%% ============================================================================
%%
diff --git a/lib/dialyzer/src/dialyzer_utils.erl b/lib/dialyzer/src/dialyzer_utils.erl
index e5f5c69d45..1cc9528fed 100644
--- a/lib/dialyzer/src/dialyzer_utils.erl
+++ b/lib/dialyzer/src/dialyzer_utils.erl
@@ -2,7 +2,7 @@
%%-----------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -40,12 +40,16 @@
get_core_from_src/2,
get_record_and_type_info/1,
get_spec_info/3,
+ get_fun_meta_info/3,
+ is_suppressed_fun/2,
+ is_suppressed_tag/3,
merge_records/2,
pp_hook/0,
process_record_remote_types/1,
sets_filter/2,
src_compiler_opts/0,
- parallelism/0
+ parallelism/0,
+ family/1
]).
-include("dialyzer.hrl").
@@ -59,13 +63,13 @@ print_types(RecDict) ->
print_types1([], _) ->
ok;
-print_types1([{type, _Name} = Key|T], RecDict) ->
- {ok, {_Mod, Form, _Args}} = dict:find(Key, RecDict),
- io:format("\n~w: ~w\n", [Key, erl_types:t_from_form(Form, RecDict)]),
+print_types1([{type, _Name, _NArgs} = Key|T], RecDict) ->
+ {ok, {{_Mod, _Form, _Args}, Type}} = dict:find(Key, RecDict),
+ io:format("\n~w: ~w\n", [Key, Type]),
print_types1(T, RecDict);
-print_types1([{opaque, _Name} = Key|T], RecDict) ->
- {ok, {_Mod, Form, _Args}} = dict:find(Key, RecDict),
- io:format("\n~w: ~w\n", [Key, erl_types:t_from_form(Form, RecDict)]),
+print_types1([{opaque, _Name, _NArgs} = Key|T], RecDict) ->
+ {ok, {{_Mod, _Form, _Args}, Type}} = dict:find(Key, RecDict),
+ io:format("\n~w: ~w\n", [Key, Type]),
print_types1(T, RecDict);
print_types1([{record, _Name} = Key|T], RecDict) ->
{ok, [{_Arity, _Fields} = AF]} = dict:find(Key, RecDict),
@@ -80,7 +84,9 @@ print_types1([{record, _Name} = Key|T], RecDict) ->
-type abstract_code() :: [tuple()]. %% XXX: import from somewhere
-type comp_options() :: [compile:option()].
--type mod_or_fname() :: atom() | file:filename().
+-type mod_or_fname() :: module() | file:filename().
+-type fa() :: {atom(), arity()}.
+-type codeserver() :: dialyzer_codeserver:codeserver().
%% ============================================================================
%%
@@ -215,28 +221,29 @@ get_record_and_type_info([{attribute, _, type, {{record, Name}, Fields0, []}}
get_record_and_type_info([{attribute, _, Attr, {Name, TypeForm}}|Left],
Module, Records, RecDict) when Attr =:= 'type';
Attr =:= 'opaque' ->
- try
- NewRecDict = add_new_type(Attr, Name, TypeForm, [], Module, RecDict),
- get_record_and_type_info(Left, Module, Records, NewRecDict)
+ try add_new_type(Attr, Name, TypeForm, [], Module, RecDict) of
+ NewRecDict ->
+ get_record_and_type_info(Left, Module, Records, NewRecDict)
catch
throw:{error, _} = Error -> Error
end;
get_record_and_type_info([{attribute, _, Attr, {Name, TypeForm, Args}}|Left],
Module, Records, RecDict) when Attr =:= 'type';
Attr =:= 'opaque' ->
- try
- NewRecDict = add_new_type(Attr, Name, TypeForm, Args, Module, RecDict),
- get_record_and_type_info(Left, Module, Records, NewRecDict)
+ try add_new_type(Attr, Name, TypeForm, Args, Module, RecDict) of
+ NewRecDict ->
+ get_record_and_type_info(Left, Module, Records, NewRecDict)
catch
throw:{error, _} = Error -> Error
end;
get_record_and_type_info([_Other|Left], Module, Records, RecDict) ->
get_record_and_type_info(Left, Module, Records, RecDict);
get_record_and_type_info([], _Module, Records, RecDict) ->
- case type_record_fields(lists:reverse(Records), RecDict) of
- {ok, _NewRecDict} = Ok ->
- ?debug(_NewRecDict),
- Ok;
+ case
+ check_type_of_record_fields(lists:reverse(Records), RecDict)
+ of
+ ok ->
+ {ok, RecDict};
{error, Name, Error} ->
{error, flat_format(" Error while parsing #~w{}: ~s\n", [Name, Error])}
end.
@@ -248,20 +255,21 @@ add_new_type(TypeOrOpaque, Name, TypeForm, ArgForms, Module, RecDict) ->
Msg = flat_format("Type ~s/~w already defined\n", [Name, Arity]),
throw({error, Msg});
false ->
- ArgTypes = [erl_types:t_from_form(X) || X <- ArgForms],
- case lists:all(fun erl_types:t_is_var/1, ArgTypes) of
- true ->
- ArgNames = [erl_types:t_var_name(X) || X <- ArgTypes],
+ try erl_types:t_var_names(ArgForms) of
+ ArgNames ->
dict:store({TypeOrOpaque, Name, Arity},
- {Module, TypeForm, ArgNames}, RecDict);
- false ->
+ {{Module, TypeForm, ArgNames},
+ erl_types:t_any()}, RecDict)
+ catch
+ _:_ ->
throw({error, flat_format("Type declaration for ~w does not "
"have variables as parameters", [Name])})
end
end.
get_record_fields(Fields, RecDict) ->
- get_record_fields(Fields, RecDict, []).
+ Fs = get_record_fields(Fields, RecDict, []),
+ {ok, [{Name, Form, erl_types:t_any()} || {Name, Form} <- Fs]}.
get_record_fields([{typed_record_field, OrdRecField, TypeForm}|Left],
RecDict, Acc) ->
@@ -270,7 +278,7 @@ get_record_fields([{typed_record_field, OrdRecField, TypeForm}|Left],
{record_field, _Line, Name0} -> erl_parse:normalise(Name0);
{record_field, _Line, Name0, _Init} -> erl_parse:normalise(Name0)
end,
- get_record_fields(Left, RecDict, [{Name, TypeForm}|Acc]);
+ get_record_fields(Left, RecDict, [{Name, TypeForm}|Acc]);
get_record_fields([{record_field, _Line, Name}|Left], RecDict, Acc) ->
NewAcc = [{erl_parse:normalise(Name), {var, -1, '_'}}|Acc],
get_record_fields(Left, RecDict, NewAcc);
@@ -278,54 +286,66 @@ get_record_fields([{record_field, _Line, Name, _Init}|Left], RecDict, Acc) ->
NewAcc = [{erl_parse:normalise(Name), {var, -1, '_'}}|Acc],
get_record_fields(Left, RecDict, NewAcc);
get_record_fields([], _RecDict, Acc) ->
- {ok, lists:reverse(Acc)}.
+ lists:reverse(Acc).
-type_record_fields([], RecDict) ->
- {ok, RecDict};
-type_record_fields([RecKey|Recs], RecDict) ->
- {ok, [{Arity, Fields}]} = dict:find(RecKey, RecDict),
+%% Just check the local types. process_record_remote_types will add
+%% the types later.
+check_type_of_record_fields([], _RecDict) ->
+ ok;
+check_type_of_record_fields([RecKey|Recs], RecDict) ->
+ {ok, [{_Arity, Fields}]} = dict:find(RecKey, RecDict),
try
- TypedFields =
- [{FieldName, erl_types:t_from_form(FieldTypeForm, RecDict)}
- || {FieldName, FieldTypeForm} <- Fields],
- RecDict1 = dict:store(RecKey, [{Arity, TypedFields}], RecDict),
- Fun = fun(OldOrdDict) ->
- orddict:store(Arity, TypedFields, OldOrdDict)
- end,
- RecDict2 = dict:update(RecKey, Fun, RecDict1),
- type_record_fields(Recs, RecDict2)
+ [erl_types:t_from_form_without_remote(FieldTypeForm, RecDict)
+ || {_FieldName, FieldTypeForm, _} <- Fields]
+ of
+ L when is_list(L) ->
+ check_type_of_record_fields(Recs, RecDict)
catch
throw:{error, Error} ->
{record, Name} = RecKey,
{error, Name, Error}
end.
--spec process_record_remote_types(dialyzer_codeserver:codeserver()) -> dialyzer_codeserver:codeserver().
+-spec process_record_remote_types(codeserver()) -> codeserver().
+%% The field types are cached. Used during analysis when handling records.
process_record_remote_types(CServer) ->
TempRecords = dialyzer_codeserver:get_temp_records(CServer),
TempExpTypes = dialyzer_codeserver:get_temp_exported_types(CServer),
- RecordFun =
- fun(Key, Value) ->
- case Key of
- {record, _Name} ->
- FieldFun =
- fun(_Arity, Fields) ->
- [{Name, erl_types:t_solve_remote(Field, TempExpTypes,
- TempRecords)}
- || {Name, Field} <- Fields]
- end,
- orddict:map(FieldFun, Value);
- _Other -> Value
- end
- end,
ModuleFun =
- fun(_Module, Record) ->
+ fun(Module, Record) ->
+ RecordFun =
+ fun(Key, Value) ->
+ case Key of
+ {record, _Name} ->
+ FieldFun =
+ fun(_Arity, Fields) ->
+ [{Name, Field,
+ erl_types:t_from_form(Field,
+ TempExpTypes,
+ Module,
+ TempRecords)}
+ || {Name, Field, _} <- Fields]
+ end,
+ orddict:map(FieldFun, Value);
+ {opaque, _, _} ->
+ {{_Module, Form, _ArgNames}=F, _Type} = Value,
+ Type = erl_types:t_from_form(Form, TempExpTypes, Module,
+ TempRecords),
+ {F, Type};
+ _Other -> Value
+ end
+ end,
dict:map(RecordFun, Record)
end,
- NewRecords = dict:map(ModuleFun, TempRecords),
- CServer1 = dialyzer_codeserver:finalize_records(NewRecords, CServer),
- dialyzer_codeserver:finalize_exported_types(TempExpTypes, CServer1).
+ try dict:map(ModuleFun, TempRecords) of
+ NewRecords ->
+ CServer1 = dialyzer_codeserver:finalize_records(NewRecords, CServer),
+ dialyzer_codeserver:finalize_exported_types(TempExpTypes, CServer1)
+ catch
+ throw:{error, _RecName, _Error} = Error->
+ Error
+ end.
-spec merge_records(dict:dict(), dict:dict()) -> dict:dict().
@@ -341,7 +361,7 @@ merge_records(NewRecords, OldRecords) ->
-type spec_dict() :: dict:dict().
-type callback_dict() :: dict:dict().
--spec get_spec_info(atom(), abstract_code(), dict:dict()) ->
+-spec get_spec_info(module(), abstract_code(), dict:dict()) ->
{'ok', spec_dict(), callback_dict()} | {'error', string()}.
get_spec_info(ModName, AbstractCode, RecordsDict) ->
@@ -359,13 +379,6 @@ get_optional_callbacks(Abs) ->
is_fa_list(O)],
lists:append(L).
-is_fa_list([{FuncName, Arity}|L])
- when is_atom(FuncName), is_integer(Arity), Arity >= 0 ->
- is_fa_list(L);
-is_fa_list([]) -> true;
-is_fa_list(_) -> false.
-
-
%% TypeSpec is a list of conditional contracts for a function.
%% Each contract is of the form {[Argument], Range, [Constraint]} where
%% - Argument and Range are in erl_types:erl_type() format and
@@ -422,6 +435,126 @@ get_spec_info([], SpecDict, CallbackDict,
_RecordsDict, _ModName, _OptCb, _File) ->
{ok, SpecDict, CallbackDict}.
+-spec get_fun_meta_info(module(), abstract_code(), [dial_warn_tag()]) ->
+ dialyzer_codeserver:fun_meta_info().
+
+get_fun_meta_info(M, Abs, LegalWarnings) ->
+ NoWarn = get_nowarn_unused_function(M, Abs),
+ FuncSupp = get_func_suppressions(M, Abs),
+ Warnings0 = get_options(Abs, LegalWarnings),
+ Warnings = ordsets:to_list(Warnings0),
+ ModuleWarnings = [{M, W} || W <- Warnings],
+ RawProps = lists:append([NoWarn, FuncSupp, ModuleWarnings]),
+ process_options(dialyzer_utils:family(RawProps), Warnings0).
+
+process_options([{M, _}=Mod|Left], Warnings) when is_atom(M) ->
+ [Mod|process_options(Left, Warnings)];
+process_options([{{_M, _F, _A}=MFA, Opts}|Left], Warnings) ->
+ WL = case lists:member(nowarn_function, Opts) of
+ true -> [{nowarn_function, func}]; % takes precedence
+ false ->
+ Ws = dialyzer_options:build_warnings(Opts, Warnings),
+ ModOnly = [{W, mod} || W <- ordsets:subtract(Warnings, Ws)],
+ FunOnly = [{W, func} || W <- ordsets:subtract(Ws, Warnings)],
+ ordsets:union(ModOnly, FunOnly)
+ end,
+ case WL of
+ [] -> process_options(Left, Warnings);
+ _ -> [{MFA, WL}|process_options(Left, Warnings)]
+ end;
+process_options([], _Warnings) -> [].
+
+-spec get_nowarn_unused_function(module(), abstract_code()) ->
+ [{mfa(), 'no_unused'}].
+
+get_nowarn_unused_function(M, Abs) ->
+ Opts = get_options_with_tag(compile, Abs),
+ Warn = erl_lint:bool_option(warn_unused_function, nowarn_unused_function,
+ true, Opts),
+ Functions = [{F, A} || {function, _, F, A, _} <- Abs],
+ AttrFile = collect_attribute(Abs, compile),
+ TagsFaList = check_fa_list(AttrFile, nowarn_unused_function, Functions),
+ FAs = case Warn of
+ false -> Functions;
+ true ->
+ [FA || {{nowarn_unused_function,_L,_File}, FA} <- TagsFaList]
+ end,
+ [{{M, F, A}, no_unused} || {F, A} <- FAs].
+
+-spec get_func_suppressions(module(), abstract_code()) ->
+ [{mfa(), 'nowarn_function' | dial_warn_tag()}].
+
+get_func_suppressions(M, Abs) ->
+ Functions = [{F, A} || {function, _, F, A, _} <- Abs],
+ AttrFile = collect_attribute(Abs, dialyzer),
+ TagsFAs = check_fa_list(AttrFile, '*', Functions),
+ %% Check the options:
+ Fun = fun({{nowarn_function, _L, _File}, _FA}) -> ok;
+ ({OptLFile, _FA}) ->
+ _ = get_options1([OptLFile], ordsets:new())
+ end,
+ lists:foreach(Fun, TagsFAs),
+ [{{M, F, A}, W} || {{W, _L, _File}, {F, A}} <- TagsFAs].
+
+-spec get_options(abstract_code(), [dial_warn_tag()]) ->
+ ordsets:ordset(dial_warn_tag()).
+
+get_options(Abs, LegalWarnings) ->
+ AttrFile = collect_attribute(Abs, dialyzer),
+ get_options1(AttrFile, LegalWarnings).
+
+get_options1([{Args, L, File}|Left], Warnings) ->
+ Opts = [O ||
+ O <- lists:flatten([Args]),
+ is_atom(O)],
+ try dialyzer_options:build_warnings(Opts, Warnings) of
+ NewWarnings ->
+ get_options1(Left, NewWarnings)
+ catch
+ throw:{dialyzer_options_error, Msg} ->
+ Msg1 = flat_format(" ~s:~w: ~s", [File, L, Msg]),
+ throw({error, Msg1})
+ end;
+get_options1([], Warnings) ->
+ Warnings.
+
+-type collected_attribute() ::
+ {Args :: term(), erl_scan:line(), file:filename()}.
+
+collect_attribute(Abs, Tag) ->
+ collect_attribute(Abs, Tag, "nofile").
+
+collect_attribute([{attribute, L, Tag, Args}|Left], Tag, File) ->
+ CollAttr = {Args, L, File},
+ [CollAttr | collect_attribute(Left, Tag, File)];
+collect_attribute([{attribute, _, file, {IncludeFile, _}}|Left], Tag, _) ->
+ collect_attribute(Left, Tag, IncludeFile);
+collect_attribute([_Other|Left], Tag, File) ->
+ collect_attribute(Left, Tag, File);
+collect_attribute([], _Tag, _File) -> [].
+
+-spec is_suppressed_fun(mfa(), codeserver()) -> boolean().
+
+is_suppressed_fun(MFA, CodeServer) ->
+ lookup_fun_property(MFA, nowarn_function, CodeServer).
+
+-spec is_suppressed_tag(mfa() | module(), dial_warn_tag(), codeserver()) ->
+ boolean().
+
+is_suppressed_tag(MorMFA, Tag, Codeserver) ->
+ not lookup_fun_property(MorMFA, Tag, Codeserver).
+
+lookup_fun_property({M, _F, _A}=MFA, Property, CodeServer) ->
+ MFAPropList = dialyzer_codeserver:lookup_meta_info(MFA, CodeServer),
+ case proplists:get_value(Property, MFAPropList, no) of
+ mod -> false; % suppressed in function
+ func -> true; % requested in function
+ no -> lookup_fun_property(M, Property, CodeServer)
+ end;
+lookup_fun_property(M, Property, CodeServer) when is_atom(M) ->
+ MPropList = dialyzer_codeserver:lookup_meta_info(M, CodeServer),
+ proplists:is_defined(Property, MPropList).
+
%% ============================================================================
%%
%% Exported types
@@ -503,6 +636,57 @@ format_sig(Type, RecDict) ->
flat_format(Fmt, Lst) ->
lists:flatten(io_lib:format(Fmt, Lst)).
+-spec get_options_with_tag(atom(), abstract_code()) -> [term()].
+
+get_options_with_tag(Tag, Abs) ->
+ lists:flatten([O || {attribute, _, Tag0, O} <- Abs, Tag =:= Tag0]).
+
+%% Check F/A, and collect (unchecked) warning tags with line and file.
+-spec check_fa_list([collected_attribute()], atom(), [fa()]) ->
+ [{{atom(), erl_scan:line(), file:filename()},fa()}].
+
+check_fa_list(AttrFile, Tag, Functions) ->
+ FuncTab = gb_sets:from_list(Functions),
+ check_fa_list1(AttrFile, Tag, FuncTab).
+
+check_fa_list1([{Args, L, File}|Left], Tag, Funcs) ->
+ TermsL = [{{Tag0, L, File}, Term} ||
+ {Tags, Terms0} <- lists:flatten([Args]),
+ Tag0 <- lists:flatten([Tags]),
+ Tag =:= '*' orelse Tag =:= Tag0,
+ Term <- lists:flatten([Terms0])],
+ case lists:dropwhile(fun({_, T}) -> is_fa(T) end, TermsL) of
+ [] -> ok;
+ [{_, Bad}|_] ->
+ Msg1 = flat_format(" Bad function ~w in line ~s:~w",
+ [Bad, File, L]),
+ throw({error, Msg1})
+ end,
+ case lists:dropwhile(fun({_, FA}) -> is_known(FA, Funcs) end, TermsL) of
+ [] -> ok;
+ [{_, {F, A}}|_] ->
+ Msg2 = flat_format(" Unknown function ~w/~w in line ~s:~w",
+ [F, A, File, L]),
+ throw({error, Msg2})
+ end,
+ TermsL ++ check_fa_list1(Left, Tag, Funcs);
+check_fa_list1([], _Tag, _Funcs) -> [].
+
+is_known(FA, Funcs) ->
+ gb_sets:is_element(FA, Funcs).
+
+-spec is_fa_list(term()) -> boolean().
+
+is_fa_list([E|L]) -> is_fa(E) andalso is_fa_list(L);
+is_fa_list([]) -> true;
+is_fa_list(_) -> false.
+
+-spec is_fa(term()) -> boolean().
+
+is_fa({FuncName, Arity})
+ when is_atom(FuncName), is_integer(Arity), Arity >= 0 -> true;
+is_fa(_) -> false.
+
%%-------------------------------------------------------------------
%% Author : Per Gustafsson <[email protected]>
%% Description : Provides better printing of binaries.
@@ -607,3 +791,8 @@ parallelism() ->
CPUs = erlang:system_info(logical_processors_available),
Schedulers = erlang:system_info(schedulers),
min(CPUs, Schedulers).
+
+-spec family([{K,V}]) -> [{K,[V]}].
+
+family(L) ->
+ sofs:to_external(sofs:rel2fam(sofs:relation(L))).
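
To make concrete what get_fun_meta_info/3, get_func_suppressions/2 and get_nowarn_unused_function/2 above collect, here is a sketch of the source-level attributes involved; the module and function names are hypothetical, and the nowarn_function form is the same one exercised by the new race test further down.

%% Illustrative sketch, not part of the patch.
-module(suppression_example).
-export([f/0, g/0]).

%% Suppress every Dialyzer warning for f/0; nowarn_function takes
%% precedence over other options for the same function.
-dialyzer({nowarn_function, f/0}).

%% Suppress only "no local return" warnings for g/0.
-dialyzer({no_return, g/0}).

%% Classic compile attribute, also folded into the meta info.
-compile({nowarn_unused_function, [h/0]}).

f() -> ok.
g() -> exit(boom).
h() -> unused.

Per-function options are validated against the module's actual functions by check_fa_list/3, so a reference to a function that does not exist is rejected with an "Unknown function" error.
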
diff --git a/lib/dialyzer/test/race_SUITE_data/src/ets_insert_args1_suppressed.erl b/lib/dialyzer/test/race_SUITE_data/src/ets_insert_args1_suppressed.erl
new file mode 100644
index 0000000000..5134cc6f0b
--- /dev/null
+++ b/lib/dialyzer/test/race_SUITE_data/src/ets_insert_args1_suppressed.erl
@@ -0,0 +1,19 @@
+%% This tests the presence of possible races due to an ets:lookup/ets:insert
+%% combination. It takes into account the argument types of the calls.
+
+-module(ets_insert_args1_suppressed).
+-export([start/0]).
+
+-dialyzer({nowarn_function,start/0}).
+
+start() ->
+ F = fun(T) -> [{_, N}] = ets:lookup(T, counter),
+ ets:insert(T, [{counter, N+1}])
+ end,
+ io:format("Created ~w\n", [ets:new(foo, [named_table, public])]),
+ ets:insert(foo, {counter, 0}),
+ io:format("Inserted ~w\n", [{counter, 0}]),
+ F(foo),
+ io:format("Update complete\n", []),
+ ObjectList = ets:lookup(foo, counter),
+ io:format("Counter: ~w\n", [ObjectList]).
diff --git a/lib/dialyzer/test/small_SUITE_data/results/blame_contract_range_suppressed b/lib/dialyzer/test/small_SUITE_data/results/blame_contract_range_suppressed
new file mode 100644
index 0000000000..40733434f6
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/results/blame_contract_range_suppressed
@@ -0,0 +1,2 @@
+
+blame_contract_range_suppressed.erl:8: Function foo/0 has no local return
diff --git a/lib/dialyzer/test/small_SUITE_data/results/contracts_with_subtypes b/lib/dialyzer/test/small_SUITE_data/results/contracts_with_subtypes
index fbdd182358..a9fbfb6068 100644
--- a/lib/dialyzer/test/small_SUITE_data/results/contracts_with_subtypes
+++ b/lib/dialyzer/test/small_SUITE_data/results/contracts_with_subtypes
@@ -6,23 +6,27 @@ contracts_with_subtypes.erl:135: The call contracts_with_subtypes:rec2({'a','b'}
contracts_with_subtypes.erl:136: The call contracts_with_subtypes:rec2({'b','a'}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,ab())
contracts_with_subtypes.erl:137: The call contracts_with_subtypes:rec2({'a',{'b','a'}}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,ab())
contracts_with_subtypes.erl:138: The call contracts_with_subtypes:rec2({'b',{'a','b'}}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,ab())
-contracts_with_subtypes.erl:171: The pattern 1 can never match the type string()
-contracts_with_subtypes.erl:174: The pattern 'alpha' can never match the type {'ok',_} | {'ok',_,string()}
-contracts_with_subtypes.erl:176: The pattern 42 can never match the type {'ok',_} | {'ok',_,string()}
-contracts_with_subtypes.erl:192: The pattern 'alpha' can never match the type {'ok',_}
-contracts_with_subtypes.erl:194: The pattern 42 can never match the type {'ok',_}
-contracts_with_subtypes.erl:212: The pattern 'alpha' can never match the type {'ok',_}
-contracts_with_subtypes.erl:214: The pattern 42 can never match the type {'ok',_}
-contracts_with_subtypes.erl:231: The pattern 1 can never match the type string()
-contracts_with_subtypes.erl:234: The pattern {'ok', _} can never match the type {'ok',_,string()}
-contracts_with_subtypes.erl:235: The pattern 'alpha' can never match the type {'ok',_,string()}
-contracts_with_subtypes.erl:236: The pattern {'ok', 42} can never match the type {'ok',_,string()}
-contracts_with_subtypes.erl:237: The pattern 42 can never match the type {'ok',_,string()}
+contracts_with_subtypes.erl:139: The call contracts_with_subtypes:rec2({'a',{'b',{'a','b'}}}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,ab())
+contracts_with_subtypes.erl:140: The call contracts_with_subtypes:rec2({'b',{'a',{'b','a'}}}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,ab())
+contracts_with_subtypes.erl:141: The call contracts_with_subtypes:rec2({'a',{'b',{'a',{'b','a'}}}}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,ab())
+contracts_with_subtypes.erl:142: The call contracts_with_subtypes:rec2({'b',{'a',{'b',{'a','b'}}}}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,ab())
+contracts_with_subtypes.erl:175: The pattern 1 can never match the type string()
+contracts_with_subtypes.erl:178: The pattern 'alpha' can never match the type {'ok',_} | {'ok',_,string()}
+contracts_with_subtypes.erl:180: The pattern 42 can never match the type {'ok',_} | {'ok',_,string()}
+contracts_with_subtypes.erl:196: The pattern 'alpha' can never match the type {'ok',_}
+contracts_with_subtypes.erl:198: The pattern 42 can never match the type {'ok',_}
+contracts_with_subtypes.erl:216: The pattern 'alpha' can never match the type {'ok',_}
+contracts_with_subtypes.erl:218: The pattern 42 can never match the type {'ok',_}
+contracts_with_subtypes.erl:235: The pattern 1 can never match the type string()
+contracts_with_subtypes.erl:238: The pattern {'ok', _} can never match the type {'ok',_,string()}
+contracts_with_subtypes.erl:239: The pattern 'alpha' can never match the type {'ok',_,string()}
contracts_with_subtypes.erl:23: Invalid type specification for function contracts_with_subtypes:extract2/0. The success typing is () -> 'something'
-contracts_with_subtypes.erl:263: Function flat_ets_new_t/0 has no local return
-contracts_with_subtypes.erl:264: The call contracts_with_subtypes:flat_ets_new(12,[]) breaks the contract (Name,Options) -> atom() when is_subtype(Name,atom()), is_subtype(Options,[Option]), is_subtype(Option,'set' | 'ordered_set' | 'bag' | 'duplicate_bag' | 'public' | 'protected' | 'private' | 'named_table' | {'keypos',integer()} | {'heir',pid(),term()} | {'heir','none'} | {'write_concurrency',boolean()} | {'read_concurrency',boolean()} | 'compressed')
-contracts_with_subtypes.erl:290: Function factored_ets_new_t/0 has no local return
-contracts_with_subtypes.erl:291: The call contracts_with_subtypes:factored_ets_new(12,[]) breaks the contract (Name,Options) -> atom() when is_subtype(Name,atom()), is_subtype(Options,[Option]), is_subtype(Option,Type | Access | 'named_table' | {'keypos',Pos} | {'heir',Pid::pid(),HeirData} | {'heir','none'} | Tweaks), is_subtype(Type,type()), is_subtype(Access,access()), is_subtype(Tweaks,{'write_concurrency',boolean()} | {'read_concurrency',boolean()} | 'compressed'), is_subtype(Pos,pos_integer()), is_subtype(HeirData,term())
+contracts_with_subtypes.erl:240: The pattern {'ok', 42} can never match the type {'ok',_,string()}
+contracts_with_subtypes.erl:241: The pattern 42 can never match the type {'ok',_,string()}
+contracts_with_subtypes.erl:267: Function flat_ets_new_t/0 has no local return
+contracts_with_subtypes.erl:268: The call contracts_with_subtypes:flat_ets_new(12,[]) breaks the contract (Name,Options) -> atom() when is_subtype(Name,atom()), is_subtype(Options,[Option]), is_subtype(Option,'set' | 'ordered_set' | 'bag' | 'duplicate_bag' | 'public' | 'protected' | 'private' | 'named_table' | {'keypos',integer()} | {'heir',pid(),term()} | {'heir','none'} | {'write_concurrency',boolean()} | {'read_concurrency',boolean()} | 'compressed')
+contracts_with_subtypes.erl:294: Function factored_ets_new_t/0 has no local return
+contracts_with_subtypes.erl:295: The call contracts_with_subtypes:factored_ets_new(12,[]) breaks the contract (Name,Options) -> atom() when is_subtype(Name,atom()), is_subtype(Options,[Option]), is_subtype(Option,Type | Access | 'named_table' | {'keypos',Pos} | {'heir',Pid::pid(),HeirData} | {'heir','none'} | Tweaks), is_subtype(Type,type()), is_subtype(Access,access()), is_subtype(Tweaks,{'write_concurrency',boolean()} | {'read_concurrency',boolean()} | 'compressed'), is_subtype(Pos,pos_integer()), is_subtype(HeirData,term())
contracts_with_subtypes.erl:77: The call contracts_with_subtypes:foo1(5) breaks the contract (Arg1) -> Res when is_subtype(Arg1,atom()), is_subtype(Res,atom())
contracts_with_subtypes.erl:78: The call contracts_with_subtypes:foo2(5) breaks the contract (Arg1) -> Res when is_subtype(Arg1,Arg2), is_subtype(Arg2,atom()), is_subtype(Res,atom())
contracts_with_subtypes.erl:79: The call contracts_with_subtypes:foo3(5) breaks the contract (Arg1) -> Res when is_subtype(Arg2,atom()), is_subtype(Arg1,Arg2), is_subtype(Res,atom())
diff --git a/lib/dialyzer/test/small_SUITE_data/results/request1 b/lib/dialyzer/test/small_SUITE_data/results/request1
new file mode 100644
index 0000000000..0cf4017403
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/results/request1
@@ -0,0 +1,2 @@
+
+request1.erl:8: Expression produces a value of type {'a','b'}, but this value is unmatched
diff --git a/lib/dialyzer/test/small_SUITE_data/results/suppress_request b/lib/dialyzer/test/small_SUITE_data/results/suppress_request
new file mode 100644
index 0000000000..18e82b7972
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/results/suppress_request
@@ -0,0 +1,6 @@
+
+suppress_request.erl:21: Expression produces a value of type {'a','b'}, but this value is unmatched
+suppress_request.erl:25: Expression produces a value of type {'a','b'}, but this value is unmatched
+suppress_request.erl:35: Function test3_b/0 has no local return
+suppress_request.erl:39: Guard test 2 =:= A::fun((none()) -> no_return()) can never succeed
+suppress_request.erl:7: Type specification suppress_request:test1('a' | 'b') -> 'ok' is a subtype of the success typing: suppress_request:test1('a' | 'b' | 'c') -> 'ok'
diff --git a/lib/dialyzer/test/small_SUITE_data/src/big_external_type.erl b/lib/dialyzer/test/small_SUITE_data/src/big_external_type.erl
new file mode 100644
index 0000000000..91a157b17f
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/big_external_type.erl
@@ -0,0 +1,528 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2001-2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%% A stripped version of erl_parse.yrl.
+%%%
+%%% A type for the abstract format with *external* types has been added.
+%%% The type of the abstract format is not up-to-date, but it does not
+%%% matter since the purpose of the type is to stress the conversion
+%%% of type forms to erl_type().
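+%%%
+%%% Every type reference below is written as ?MODULE:T(), so each
+%%% reference is treated as a remote (external) type.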
+
+-module(big_external_type).
+
+-export([parse_form/1,parse_exprs/1,parse_term/1]).
+-export([normalise/1,tokens/1,tokens/2]).
+-export([inop_prec/1,preop_prec/1,func_prec/0,max_prec/0]).
+
+-export_type([abstract_clause/0, abstract_expr/0, abstract_form/0,
+ error_info/0]).
+
+%% Start of Abstract Format
+
+-type line() :: erl_scan:line().
+
+-export_type([af_record_index/0, af_record_field/1, af_record_name/0,
+ af_field_name/0, af_function_decl/0]).
+
+-export_type([af_module/0, af_export/0, af_import/0, af_fa_list/0,
+ af_compile/0, af_file/0, af_record_decl/0,
+ af_field_decl/0, af_wild_attribute/0,
+ af_record_update/1, af_catch/0, af_local_call/0,
+ af_remote_call/0, af_args/0, af_local_function/0,
+ af_remote_function/0, af_list_comprehension/0,
+ af_binary_comprehension/0, af_template/0,
+ af_qualifier_seq/0, af_qualifier/0, af_generator/0,
+ af_filter/0, af_block/0, af_if/0, af_case/0, af_try/0,
+ af_clause_seq/0, af_catch_clause_seq/0, af_receive/0,
+ af_local_fun/0, af_remote_fun/0, af_fun/0, af_query/0,
+ af_query_access/0, af_clause/0,
+ af_catch_clause/0, af_catch_pattern/0, af_catch_class/0,
+ af_body/0, af_guard_seq/0, af_guard/0, af_guard_test/0,
+ af_record_access/1, af_guard_call/0,
+ af_remote_guard_call/0, af_pattern/0, af_literal/0,
+ af_atom/0, af_lit_atom/1, af_integer/0, af_float/0,
+ af_string/0, af_match/1, af_variable/0,
+ af_anon_variable/0, af_tuple/1, af_nil/0, af_cons/1,
+ af_bin/1, af_binelement/1, af_binelement_size/0,
+ af_binary_op/1, af_binop/0, af_unary_op/1, af_unop/0]).
+
+-type abstract_form() :: ?MODULE:af_module()
+ | ?MODULE:af_export()
+ | ?MODULE:af_import()
+ | ?MODULE:af_compile()
+ | ?MODULE:af_file()
+ | ?MODULE:af_record_decl()
+ | ?MODULE:af_wild_attribute()
+ | ?MODULE:af_function_decl().
+
+-type af_module() :: {attribute, line(), module, module()}.
+
+-type af_export() :: {attribute, line(), export, ?MODULE:af_fa_list()}.
+
+-type af_import() :: {attribute, line(), import, ?MODULE:af_fa_list()}.
+
+-type af_fa_list() :: [{function(), arity()}].
+
+-type af_compile() :: {attribute, line(), compile, any()}.
+
+-type af_file() :: {attribute, line(), file, {string(), line()}}.
+
+-type af_record_decl() ::
+ {attribute, line(), record, ?MODULE:af_record_name(), [?MODULE:af_field_decl()]}.
+
+-type af_field_decl() :: {record_field, line(), ?MODULE:af_atom()}
+ | {record_field, line(), ?MODULE:af_atom(), ?MODULE:abstract_expr()}.
+
+%% Types and specs, among other things...
+-type af_wild_attribute() :: {attribute, line(), ?MODULE:af_atom(), any()}.
+
+-type af_function_decl() ::
+ {function, line(), function(), arity(), ?MODULE:af_clause_seq()}.
+
+-type abstract_expr() :: ?MODULE:af_literal()
+ | ?MODULE:af_match(?MODULE:abstract_expr())
+ | ?MODULE:af_variable()
+ | ?MODULE:af_tuple(?MODULE:abstract_expr())
+ | ?MODULE:af_nil()
+ | ?MODULE:af_cons(?MODULE:abstract_expr())
+ | ?MODULE:af_bin(?MODULE:abstract_expr())
+ | ?MODULE:af_binary_op(?MODULE:abstract_expr())
+ | ?MODULE:af_unary_op(?MODULE:abstract_expr())
+ | ?MODULE:af_record_access(?MODULE:abstract_expr())
+ | ?MODULE:af_record_update(?MODULE:abstract_expr())
+ | ?MODULE:af_record_index()
+ | ?MODULE:af_record_field(?MODULE:abstract_expr())
+ | ?MODULE:af_catch()
+ | ?MODULE:af_local_call()
+ | ?MODULE:af_remote_call()
+ | ?MODULE:af_list_comprehension()
+ | ?MODULE:af_binary_comprehension()
+ | ?MODULE:af_block()
+ | ?MODULE:af_if()
+ | ?MODULE:af_case()
+ | ?MODULE:af_try()
+ | ?MODULE:af_receive()
+ | ?MODULE:af_local_fun()
+ | ?MODULE:af_remote_fun()
+ | ?MODULE:af_fun()
+ | ?MODULE:af_query()
+ | ?MODULE:af_query_access().
+
+-type af_record_update(T) :: {record,
+ line(),
+ ?MODULE:abstract_expr(),
+ ?MODULE:af_record_name(),
+ [?MODULE:af_record_field(T)]}.
+
+-type af_catch() :: {'catch', line(), ?MODULE:abstract_expr()}.
+
+-type af_local_call() :: {call, line(), ?MODULE:af_local_function(), ?MODULE:af_args()}.
+
+-type af_remote_call() :: {call, line(), ?MODULE:af_remote_function(), ?MODULE:af_args()}.
+
+-type af_args() :: [?MODULE:abstract_expr()].
+
+-type af_local_function() :: ?MODULE:abstract_expr().
+
+-type af_remote_function() ::
+ {remote, line(), ?MODULE:abstract_expr(), ?MODULE:abstract_expr()}.
+
+-type af_list_comprehension() ::
+ {lc, line(), ?MODULE:af_template(), ?MODULE:af_qualifier_seq()}.
+
+-type af_binary_comprehension() ::
+ {bc, line(), ?MODULE:af_template(), ?MODULE:af_qualifier_seq()}.
+
+-type af_template() :: ?MODULE:abstract_expr().
+
+-type af_qualifier_seq() :: [?MODULE:af_qualifier()].
+
+-type af_qualifier() :: ?MODULE:af_generator() | ?MODULE:af_filter().
+
+-type af_generator() :: {generate, line(), ?MODULE:af_pattern(), ?MODULE:abstract_expr()}
+ | {b_generate, line(), ?MODULE:af_pattern(), ?MODULE:abstract_expr()}.
+
+-type af_filter() :: ?MODULE:abstract_expr().
+
+-type af_block() :: {block, line(), ?MODULE:af_body()}.
+
+-type af_if() :: {'if', line(), ?MODULE:af_clause_seq()}.
+
+-type af_case() :: {'case', line(), ?MODULE:abstract_expr(), ?MODULE:af_clause_seq()}.
+
+-type af_try() :: {'try',
+ line(),
+ ?MODULE:af_body(),
+ ?MODULE:af_clause_seq(),
+ ?MODULE:af_catch_clause_seq(),
+ ?MODULE:af_body()}.
+
+-type af_clause_seq() :: [?MODULE:af_clause(), ...].
+
+-type af_catch_clause_seq() :: [?MODULE:af_clause(), ...].
+
+-type af_receive() ::
+ {'receive', line(), ?MODULE:af_clause_seq()}
+ | {'receive', line(), ?MODULE:af_clause_seq(), ?MODULE:abstract_expr(), ?MODULE:af_body()}.
+
+-type af_local_fun() :: {'fun', line(), {function, function(), arity()}}.
+
+-type af_remote_fun() ::
+ {'fun', line(), {function, module(), function(), arity()}}
+ | {'fun', line(), {function, ?MODULE:af_atom(), ?MODULE:af_atom(), ?MODULE:af_integer()}}.
+
+-type af_fun() :: {'fun', line(), {clauses, ?MODULE:af_clause_seq()}}.
+
+-type af_query() :: {'query', line(), ?MODULE:af_list_comprehension()}.
+
+-type af_query_access() ::
+ {record_field, line(), ?MODULE:abstract_expr(), ?MODULE:af_field_name()}.
+
+-type abstract_clause() :: ?MODULE:af_clause() | ?MODULE:af_catch_clause().
+
+-type af_clause() ::
+ {clause, line(), [?MODULE:af_pattern()], ?MODULE:af_guard_seq(), ?MODULE:af_body()}.
+
+-type af_catch_clause() ::
+ {clause, line(), [?MODULE:af_catch_pattern()], ?MODULE:af_guard_seq(), ?MODULE:af_body()}.
+
+-type af_catch_pattern() ::
+ {?MODULE:af_catch_class(), ?MODULE:af_pattern(), ?MODULE:af_anon_variable()}.
+
+-type af_catch_class() ::
+ ?MODULE:af_variable()
+ | ?MODULE:af_lit_atom(throw) | ?MODULE:af_lit_atom(error) | ?MODULE:af_lit_atom(exit).
+
+-type af_body() :: [?MODULE:abstract_expr(), ...].
+
+-type af_guard_seq() :: [?MODULE:af_guard()].
+
+-type af_guard() :: [?MODULE:af_guard_test(), ...].
+
+-type af_guard_test() :: ?MODULE:af_literal()
+ | ?MODULE:af_variable()
+ | ?MODULE:af_tuple(?MODULE:af_guard_test())
+ | ?MODULE:af_nil()
+ | ?MODULE:af_cons(?MODULE:af_guard_test())
+ | ?MODULE:af_bin(?MODULE:af_guard_test())
+ | ?MODULE:af_binary_op(?MODULE:af_guard_test())
+ | ?MODULE:af_unary_op(?MODULE:af_guard_test())
+ | ?MODULE:af_record_access(?MODULE:af_guard_test())
+ | ?MODULE:af_record_index()
+ | ?MODULE:af_record_field(?MODULE:af_guard_test())
+ | ?MODULE:af_guard_call()
+ | ?MODULE:af_remote_guard_call().
+
+-type af_record_access(T) ::
+ {record, line(), ?MODULE:af_record_name(), [?MODULE:af_record_field(T)]}.
+
+-type af_guard_call() :: {call, line(), function(), [?MODULE:af_guard_test()]}.
+
+-type af_remote_guard_call() ::
+ {call, line(), atom(), ?MODULE:af_lit_atom(erlang), [?MODULE:af_guard_test()]}.
+
+-type af_pattern() :: ?MODULE:af_literal()
+ | ?MODULE:af_match(?MODULE:af_pattern())
+ | ?MODULE:af_variable()
+ | ?MODULE:af_anon_variable()
+ | ?MODULE:af_tuple(?MODULE:af_pattern())
+ | ?MODULE:af_nil()
+ | ?MODULE:af_cons(?MODULE:af_pattern())
+ | ?MODULE:af_bin(?MODULE:af_pattern())
+ | ?MODULE:af_binary_op(?MODULE:af_pattern())
+ | ?MODULE:af_unary_op(?MODULE:af_pattern())
+ | ?MODULE:af_record_index()
+ | ?MODULE:af_record_field(?MODULE:af_pattern()).
+
+-type af_literal() :: ?MODULE:af_atom() | ?MODULE:af_integer() | ?MODULE:af_float() | ?MODULE:af_string().
+
+-type af_atom() :: ?MODULE:af_lit_atom(atom()).
+
+-type af_lit_atom(A) :: {atom, line(), A}.
+
+-type af_integer() :: {integer, line(), non_neg_integer()}.
+
+-type af_float() :: {float, line(), float()}.
+
+-type af_string() :: {string, line(), [byte()]}.
+
+-type af_match(T) :: {match, line(), T, T}.
+
+-type af_variable() :: {var, line(), atom()}.
+
+-type af_anon_variable() :: {var, line(), '_'}.
+
+-type af_tuple(T) :: {tuple, line(), [T]}.
+
+-type af_nil() :: {nil, line()}.
+
+-type af_cons(T) :: {cons, line, T, T}.
+
+-type af_bin(T) :: {bin, line(), [?MODULE:af_binelement(T)]}.
+
+-type af_binelement(T) :: {bin_element,
+ line(),
+ T,
+ ?MODULE:af_binelement_size(),
+ type_specifier_list()}.
+
+-type af_binelement_size() :: default | ?MODULE:abstract_expr().
+
+-type af_binary_op(T) :: {op, line(), T, ?MODULE:af_binop(), T}.
+
+-type af_binop() :: '/' | '*' | 'div' | 'rem' | 'band' | 'and' | '+' | '-'
+ | 'bor' | 'bxor' | 'bsl' | 'bsr' | 'or' | 'xor' | '++'
+ | '--' | '==' | '/=' | '=<' | '<' | '>=' | '>' | '=:='
+ | '=/='.
+
+-type af_unary_op(T) :: {op, line(), ?MODULE:af_unop(), T}.
+
+-type af_unop() :: '+' | '*' | 'bnot' | 'not'.
+
+%% See also lib/stdlib/{src/erl_bits.erl,include/erl_bits.hrl}.
+-type type_specifier_list() :: default | [type_specifier(), ...].
+
+-type type_specifier() :: af_type()
+ | af_signedness()
+ | af_endianness()
+ | af_unit().
+
+-type af_type() :: integer
+ | float
+ | binary
+ | bytes
+ | bitstring
+ | bits
+ | utf8
+ | utf16
+ | utf32.
+
+-type af_signedness() :: signed | unsigned.
+
+-type af_endianness() :: big | little | native.
+
+-type af_unit() :: {unit, 1..256}.
+
+-type af_record_index() ::
+ {record_index, line(), af_record_name(), af_field_name()}.
+
+-type af_record_field(T) :: {record_field, line(), af_field_name(), T}.
+
+-type af_record_name() :: atom().
+
+-type af_field_name() :: atom().
+
+%% End of Abstract Format
+
+-type error_description() :: term().
+-type error_info() :: {erl_scan:line(), module(), error_description()}.
+-type token() :: {Tag :: atom(), Line :: erl_scan:line()}.
+
+%% mkop(Op, Arg) -> {op,Line,Op,Arg}.
+%% mkop(Left, Op, Right) -> {op,Line,Op,Left,Right}.
+
+-define(mkop2(L, OpPos, R),
+ begin
+ {Op,Pos} = OpPos,
+ {op,Pos,Op,L,R}
+ end).
+
+-define(mkop1(OpPos, A),
+ begin
+ {Op,Pos} = OpPos,
+ {op,Pos,Op,A}
+ end).
+
+%% keep track of line info in tokens
+-define(line(Tup), element(2, Tup)).
+
+%% Entry points compatible to old erl_parse.
+%% These really suck and are only here until Calle gets multiple
+%% entry points working.
+
+-spec parse_form(Tokens) -> {ok, AbsForm} | {error, ErrorInfo} when
+ Tokens :: [token()],
+ AbsForm :: abstract_form(),
+ ErrorInfo :: error_info().
+parse_form([{'-',L1},{atom,L2,spec}|Tokens]) ->
+ parse([{'-',L1},{'spec',L2}|Tokens]);
+parse_form([{'-',L1},{atom,L2,callback}|Tokens]) ->
+ parse([{'-',L1},{'callback',L2}|Tokens]);
+parse_form(Tokens) ->
+ parse(Tokens).
+
+-spec parse_exprs(Tokens) -> {ok, ExprList} | {error, ErrorInfo} when
+ Tokens :: [token()],
+ ExprList :: [abstract_expr()],
+ ErrorInfo :: error_info().
+parse_exprs(Tokens) ->
+ case parse([{atom,0,f},{'(',0},{')',0},{'->',0}|Tokens]) of
+ {ok,{function,_Lf,f,0,[{clause,_Lc,[],[],Exprs}]}} ->
+ {ok,Exprs};
+ {error,_} = Err -> Err
+ end.
+
+-spec parse_term(Tokens) -> {ok, Term} | {error, ErrorInfo} when
+ Tokens :: [token()],
+ Term :: term(),
+ ErrorInfo :: error_info().
+parse_term(Tokens) ->
+ case parse([{atom,0,f},{'(',0},{')',0},{'->',0}|Tokens]) of
+ {ok,{function,_Lf,f,0,[{clause,_Lc,[],[],[Expr]}]}} ->
+ try normalise(Expr) of
+ Term -> {ok,Term}
+ catch
+ _:_R -> {error,{?line(Expr),?MODULE,"bad term"}}
+ end;
+ {ok,{function,_Lf,f,0,[{clause,_Lc,[],[],[_E1,E2|_Es]}]}} ->
+ {error,{?line(E2),?MODULE,"bad term"}};
+ {error,_} = Err -> Err
+ end.
+
+%% Convert between the abstract form of a term and a term.
+
+-spec normalise(AbsTerm) -> Data when
+ AbsTerm :: abstract_expr(),
+ Data :: term().
+normalise({char,_,C}) -> C;
+normalise({integer,_,I}) -> I;
+normalise({float,_,F}) -> F;
+normalise({atom,_,A}) -> A;
+normalise({string,_,S}) -> S;
+normalise({nil,_}) -> [];
+normalise({bin,_,Fs}) ->
+ {value, B, _} =
+ eval_bits:expr_grp(Fs, [],
+ fun(E, _) ->
+ {value, normalise(E), []}
+ end, [], true),
+ B;
+normalise({cons,_,Head,Tail}) ->
+ [normalise(Head)|normalise(Tail)];
+normalise({tuple,_,Args}) ->
+ list_to_tuple(normalise_list(Args));
+%% Special case for unary +/-.
+normalise({op,_,'+',{char,_,I}}) -> I;
+normalise({op,_,'+',{integer,_,I}}) -> I;
+normalise({op,_,'+',{float,_,F}}) -> F;
+normalise({op,_,'-',{char,_,I}}) -> -I; %Weird, but compatible!
+normalise({op,_,'-',{integer,_,I}}) -> -I;
+normalise({op,_,'-',{float,_,F}}) -> -F;
+normalise(X) -> erlang:error({badarg, X}).
+
+normalise_list([H|T]) ->
+ [normalise(H)|normalise_list(T)];
+normalise_list([]) ->
+ [].
+
+%% Generate a list of tokens representing the abstract term.
+
+-spec tokens(AbsTerm) -> Tokens when
+ AbsTerm :: abstract_expr(),
+ Tokens :: [token()].
+tokens(Abs) ->
+ tokens(Abs, []).
+
+-spec tokens(AbsTerm, MoreTokens) -> Tokens when
+ AbsTerm :: abstract_expr(),
+ MoreTokens :: [token()],
+ Tokens :: [token()].
+tokens({char,L,C}, More) -> [{char,L,C}|More];
+tokens({integer,L,N}, More) -> [{integer,L,N}|More];
+tokens({float,L,F}, More) -> [{float,L,F}|More];
+tokens({atom,L,A}, More) -> [{atom,L,A}|More];
+tokens({var,L,V}, More) -> [{var,L,V}|More];
+tokens({string,L,S}, More) -> [{string,L,S}|More];
+tokens({nil,L}, More) -> [{'[',L},{']',L}|More];
+tokens({cons,L,Head,Tail}, More) ->
+ [{'[',L}|tokens(Head, tokens_tail(Tail, More))];
+tokens({tuple,L,[]}, More) ->
+ [{'{',L},{'}',L}|More];
+tokens({tuple,L,[E|Es]}, More) ->
+ [{'{',L}|tokens(E, tokens_tuple(Es, ?line(E), More))].
+
+tokens_tail({cons,L,Head,Tail}, More) ->
+ [{',',L}|tokens(Head, tokens_tail(Tail, More))];
+tokens_tail({nil,L}, More) ->
+ [{']',L}|More];
+tokens_tail(Other, More) ->
+ L = ?line(Other),
+ [{'|',L}|tokens(Other, [{']',L}|More])].
+
+tokens_tuple([E|Es], Line, More) ->
+ [{',',Line}|tokens(E, tokens_tuple(Es, ?line(E), More))];
+tokens_tuple([], Line, More) ->
+ [{'}',Line}|More].
+
+%% Give the relative precedences of operators.
+
+inop_prec('=') -> {150,100,100};
+inop_prec('!') -> {150,100,100};
+inop_prec('orelse') -> {160,150,150};
+inop_prec('andalso') -> {200,160,160};
+inop_prec('==') -> {300,200,300};
+inop_prec('/=') -> {300,200,300};
+inop_prec('=<') -> {300,200,300};
+inop_prec('<') -> {300,200,300};
+inop_prec('>=') -> {300,200,300};
+inop_prec('>') -> {300,200,300};
+inop_prec('=:=') -> {300,200,300};
+inop_prec('=/=') -> {300,200,300};
+inop_prec('++') -> {400,300,300};
+inop_prec('--') -> {400,300,300};
+inop_prec('+') -> {400,400,500};
+inop_prec('-') -> {400,400,500};
+inop_prec('bor') -> {400,400,500};
+inop_prec('bxor') -> {400,400,500};
+inop_prec('bsl') -> {400,400,500};
+inop_prec('bsr') -> {400,400,500};
+inop_prec('or') -> {400,400,500};
+inop_prec('xor') -> {400,400,500};
+inop_prec('*') -> {500,500,600};
+inop_prec('/') -> {500,500,600};
+inop_prec('div') -> {500,500,600};
+inop_prec('rem') -> {500,500,600};
+inop_prec('band') -> {500,500,600};
+inop_prec('and') -> {500,500,600};
+inop_prec('#') -> {800,700,800};
+inop_prec(':') -> {900,800,900};
+inop_prec('.') -> {900,900,1000}.
+
+-type pre_op() :: 'catch' | '+' | '-' | 'bnot' | 'not' | '#'.
+
+-spec preop_prec(pre_op()) -> {0 | 600 | 700, 100 | 700 | 800}.
+
+preop_prec('catch') -> {0,100};
+preop_prec('+') -> {600,700};
+preop_prec('-') -> {600,700};
+preop_prec('bnot') -> {600,700};
+preop_prec('not') -> {600,700};
+preop_prec('#') -> {700,800}.
+
+-spec func_prec() -> {800,700}.
+
+func_prec() -> {800,700}.
+
+-spec max_prec() -> 1000.
+
+max_prec() -> 1000.
+
+parse(T) ->
+ bar:foo(T).
diff --git a/lib/dialyzer/test/small_SUITE_data/src/big_local_type.erl b/lib/dialyzer/test/small_SUITE_data/src/big_local_type.erl
new file mode 100644
index 0000000000..6de263eda1
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/big_local_type.erl
@@ -0,0 +1,525 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2001-2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%% A stripped version of erl_parse.yrl.
+%%%
+%%% A type for the abstract format with *local* types has been added.
+%%% The type of the abstract format is not up-to-date, but it does not
+%%% matter since the purpose of the type is to stress the conversion
+%%% of type forms to erl_type().
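+%%%
+%%% Unlike big_external_type.erl, all type references below are plain
+%%% local types.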
+
+-module(big_local_type).
+
+-export([parse_form/1,parse_exprs/1,parse_term/1]).
+-export([normalise/1,tokens/1,tokens/2]).
+-export([inop_prec/1,preop_prec/1,func_prec/0,max_prec/0]).
+
+-export_type([abstract_clause/0, abstract_expr/0, abstract_form/0,
+ error_info/0]).
+
+%% Start of Abstract Format
+
+-type line() :: erl_scan:line().
+
+-export_type([af_module/0, af_export/0, af_import/0, af_fa_list/0,
+ af_compile/0, af_file/0, af_record_decl/0,
+ af_field_decl/0, af_wild_attribute/0,
+ af_record_update/1, af_catch/0, af_local_call/0,
+ af_remote_call/0, af_args/0, af_local_function/0,
+ af_remote_function/0, af_list_comprehension/0,
+ af_binary_comprehension/0, af_template/0,
+ af_qualifier_seq/0, af_qualifier/0, af_generator/0,
+ af_filter/0, af_block/0, af_if/0, af_case/0, af_try/0,
+ af_clause_seq/0, af_catch_clause_seq/0, af_receive/0,
+ af_local_fun/0, af_remote_fun/0, af_fun/0, af_query/0,
+ af_query_access/0, af_clause/0,
+ af_catch_clause/0, af_catch_pattern/0, af_catch_class/0,
+ af_body/0, af_guard_seq/0, af_guard/0, af_guard_test/0,
+ af_record_access/1, af_guard_call/0,
+ af_remote_guard_call/0, af_pattern/0, af_literal/0,
+ af_atom/0, af_lit_atom/1, af_integer/0, af_float/0,
+ af_string/0, af_match/1, af_variable/0,
+ af_anon_variable/0, af_tuple/1, af_nil/0, af_cons/1,
+ af_bin/1, af_binelement/1, af_binelement_size/0,
+ af_binary_op/1, af_binop/0, af_unary_op/1, af_unop/0]).
+
+-type abstract_form() :: af_module()
+ | af_export()
+ | af_import()
+ | af_compile()
+ | af_file()
+ | af_record_decl()
+ | af_wild_attribute()
+ | af_function_decl().
+
+-type af_module() :: {attribute, line(), module, module()}.
+
+-type af_export() :: {attribute, line(), export, af_fa_list()}.
+
+-type af_import() :: {attribute, line(), import, af_fa_list()}.
+
+-type af_fa_list() :: [{function(), arity()}].
+
+-type af_compile() :: {attribute, line(), compile, any()}.
+
+-type af_file() :: {attribute, line(), file, {string(), line()}}.
+
+-type af_record_decl() ::
+ {attribute, line(), record, af_record_name(), [af_field_decl()]}.
+
+-type af_field_decl() :: {record_field, line(), af_atom()}
+ | {record_field, line(), af_atom(), abstract_expr()}.
+
+%% Types and specs, among other things...
+-type af_wild_attribute() :: {attribute, line(), af_atom(), any()}.
+
+-type af_function_decl() ::
+ {function, line(), function(), arity(), af_clause_seq()}.
+
+-type abstract_expr() :: af_literal()
+ | af_match(abstract_expr())
+ | af_variable()
+ | af_tuple(abstract_expr())
+ | af_nil()
+ | af_cons(abstract_expr())
+ | af_bin(abstract_expr())
+ | af_binary_op(abstract_expr())
+ | af_unary_op(abstract_expr())
+ | af_record_access(abstract_expr())
+ | af_record_update(abstract_expr())
+ | af_record_index()
+ | af_record_field(abstract_expr())
+ | af_catch()
+ | af_local_call()
+ | af_remote_call()
+ | af_list_comprehension()
+ | af_binary_comprehension()
+ | af_block()
+ | af_if()
+ | af_case()
+ | af_try()
+ | af_receive()
+ | af_local_fun()
+ | af_remote_fun()
+ | af_fun()
+ | af_query()
+ | af_query_access().
+
+-type af_record_update(T) :: {record,
+ line(),
+ abstract_expr(),
+ af_record_name(),
+ [af_record_field(T)]}.
+
+-type af_catch() :: {'catch', line(), abstract_expr()}.
+
+-type af_local_call() :: {call, line(), af_local_function(), af_args()}.
+
+-type af_remote_call() :: {call, line(), af_remote_function(), af_args()}.
+
+-type af_args() :: [abstract_expr()].
+
+-type af_local_function() :: abstract_expr().
+
+-type af_remote_function() ::
+ {remote, line(), abstract_expr(), abstract_expr()}.
+
+-type af_list_comprehension() ::
+ {lc, line(), af_template(), af_qualifier_seq()}.
+
+-type af_binary_comprehension() ::
+ {bc, line(), af_template(), af_qualifier_seq()}.
+
+-type af_template() :: abstract_expr().
+
+-type af_qualifier_seq() :: [af_qualifier()].
+
+-type af_qualifier() :: af_generator() | af_filter().
+
+-type af_generator() :: {generate, line(), af_pattern(), abstract_expr()}
+ | {b_generate, line(), af_pattern(), abstract_expr()}.
+
+-type af_filter() :: abstract_expr().
+
+-type af_block() :: {block, line(), af_body()}.
+
+-type af_if() :: {'if', line(), af_clause_seq()}.
+
+-type af_case() :: {'case', line(), abstract_expr(), af_clause_seq()}.
+
+-type af_try() :: {'try',
+ line(),
+ af_body(),
+ af_clause_seq(),
+ af_catch_clause_seq(),
+ af_body()}.
+
+-type af_clause_seq() :: [af_clause(), ...].
+
+-type af_catch_clause_seq() :: [af_clause(), ...].
+
+-type af_receive() ::
+ {'receive', line(), af_clause_seq()}
+ | {'receive', line(), af_clause_seq(), abstract_expr(), af_body()}.
+
+-type af_local_fun() :: {'fun', line(), {function, function(), arity()}}.
+
+-type af_remote_fun() ::
+ {'fun', line(), {function, module(), function(), arity()}}
+ | {'fun', line(), {function, af_atom(), af_atom(), af_integer()}}.
+
+-type af_fun() :: {'fun', line(), {clauses, af_clause_seq()}}.
+
+-type af_query() :: {'query', line(), af_list_comprehension()}.
+
+-type af_query_access() ::
+ {record_field, line(), abstract_expr(), af_field_name()}.
+
+-type abstract_clause() :: af_clause() | af_catch_clause().
+
+-type af_clause() ::
+ {clause, line(), [af_pattern()], af_guard_seq(), af_body()}.
+
+-type af_catch_clause() ::
+ {clause, line(), [af_catch_pattern()], af_guard_seq(), af_body()}.
+
+-type af_catch_pattern() ::
+ {af_catch_class(), af_pattern(), af_anon_variable()}.
+
+-type af_catch_class() ::
+ af_variable()
+ | af_lit_atom(throw) | af_lit_atom(error) | af_lit_atom(exit).
+
+-type af_body() :: [abstract_expr(), ...].
+
+-type af_guard_seq() :: [af_guard()].
+
+-type af_guard() :: [af_guard_test(), ...].
+
+-type af_guard_test() :: af_literal()
+ | af_variable()
+ | af_tuple(af_guard_test())
+ | af_nil()
+ | af_cons(af_guard_test())
+ | af_bin(af_guard_test())
+ | af_binary_op(af_guard_test())
+ | af_unary_op(af_guard_test())
+ | af_record_access(af_guard_test())
+ | af_record_index()
+ | af_record_field(af_guard_test())
+ | af_guard_call()
+ | af_remote_guard_call().
+
+-type af_record_access(T) ::
+ {record, line(), af_record_name(), [af_record_field(T)]}.
+
+-type af_guard_call() :: {call, line(), function(), [af_guard_test()]}.
+
+-type af_remote_guard_call() ::
+ {call, line(), atom(), af_lit_atom(erlang), [af_guard_test()]}.
+
+-type af_pattern() :: af_literal()
+ | af_match(af_pattern())
+ | af_variable()
+ | af_anon_variable()
+ | af_tuple(af_pattern())
+ | af_nil()
+ | af_cons(af_pattern())
+ | af_bin(af_pattern())
+ | af_binary_op(af_pattern())
+ | af_unary_op(af_pattern())
+ | af_record_index()
+ | af_record_field(af_pattern()).
+
+-type af_literal() :: af_atom() | af_integer() | af_float() | af_string().
+
+-type af_atom() :: af_lit_atom(atom()).
+
+-type af_lit_atom(A) :: {atom, line(), A}.
+
+-type af_integer() :: {integer, line(), non_neg_integer()}.
+
+-type af_float() :: {float, line(), float()}.
+
+-type af_string() :: {string, line(), [byte()]}.
+
+-type af_match(T) :: {match, line(), T, T}.
+
+-type af_variable() :: {var, line(), atom()}.
+
+-type af_anon_variable() :: {var, line(), '_'}.
+
+-type af_tuple(T) :: {tuple, line(), [T]}.
+
+-type af_nil() :: {nil, line()}.
+
+-type af_cons(T) :: {cons, line, T, T}.
+
+-type af_bin(T) :: {bin, line(), [af_binelement(T)]}.
+
+-type af_binelement(T) :: {bin_element,
+ line(),
+ T,
+ af_binelement_size(),
+ type_specifier_list()}.
+
+-type af_binelement_size() :: default | abstract_expr().
+
+-type af_binary_op(T) :: {op, line(), T, af_binop(), T}.
+
+-type af_binop() :: '/' | '*' | 'div' | 'rem' | 'band' | 'and' | '+' | '-'
+ | 'bor' | 'bxor' | 'bsl' | 'bsr' | 'or' | 'xor' | '++'
+ | '--' | '==' | '/=' | '=<' | '<' | '>=' | '>' | '=:='
+ | '=/='.
+
+-type af_unary_op(T) :: {op, line(), af_unop(), T}.
+
+-type af_unop() :: '+' | '*' | 'bnot' | 'not'.
+
+%% See also lib/stdlib/{src/erl_bits.erl,include/erl_bits.hrl}.
+-type type_specifier_list() :: default | [type_specifier(), ...].
+
+-type type_specifier() :: af_type()
+ | af_signedness()
+ | af_endianness()
+ | af_unit().
+
+-type af_type() :: integer
+ | float
+ | binary
+ | bytes
+ | bitstring
+ | bits
+ | utf8
+ | utf16
+ | utf32.
+
+-type af_signedness() :: signed | unsigned.
+
+-type af_endianness() :: big | little | native.
+
+-type af_unit() :: {unit, 1..256}.
+
+-type af_record_index() ::
+ {record_index, line(), af_record_name(), af_field_name()}.
+
+-type af_record_field(T) :: {record_field, line(), af_field_name(), T}.
+
+-type af_record_name() :: atom().
+
+-type af_field_name() :: atom().
+
+%% End of Abstract Format
+
+-type error_description() :: term().
+-type error_info() :: {erl_scan:line(), module(), error_description()}.
+-type token() :: {Tag :: atom(), Line :: erl_scan:line()}.
+
+%% mkop(Op, Arg) -> {op,Line,Op,Arg}.
+%% mkop(Left, Op, Right) -> {op,Line,Op,Left,Right}.
+
+-define(mkop2(L, OpPos, R),
+ begin
+ {Op,Pos} = OpPos,
+ {op,Pos,Op,L,R}
+ end).
+
+-define(mkop1(OpPos, A),
+ begin
+ {Op,Pos} = OpPos,
+ {op,Pos,Op,A}
+ end).
+
+%% keep track of line info in tokens
+-define(line(Tup), element(2, Tup)).
+
+%% Entry points compatible to old erl_parse.
+%% These really suck and are only here until Calle gets multiple
+%% entry points working.
+
+-spec parse_form(Tokens) -> {ok, AbsForm} | {error, ErrorInfo} when
+ Tokens :: [token()],
+ AbsForm :: abstract_form(),
+ ErrorInfo :: error_info().
+parse_form([{'-',L1},{atom,L2,spec}|Tokens]) ->
+ parse([{'-',L1},{'spec',L2}|Tokens]);
+parse_form([{'-',L1},{atom,L2,callback}|Tokens]) ->
+ parse([{'-',L1},{'callback',L2}|Tokens]);
+parse_form(Tokens) ->
+ parse(Tokens).
+
+-spec parse_exprs(Tokens) -> {ok, ExprList} | {error, ErrorInfo} when
+ Tokens :: [token()],
+ ExprList :: [abstract_expr()],
+ ErrorInfo :: error_info().
+parse_exprs(Tokens) ->
+ case parse([{atom,0,f},{'(',0},{')',0},{'->',0}|Tokens]) of
+ {ok,{function,_Lf,f,0,[{clause,_Lc,[],[],Exprs}]}} ->
+ {ok,Exprs};
+ {error,_} = Err -> Err
+ end.
+
+-spec parse_term(Tokens) -> {ok, Term} | {error, ErrorInfo} when
+ Tokens :: [token()],
+ Term :: term(),
+ ErrorInfo :: error_info().
+parse_term(Tokens) ->
+ case parse([{atom,0,f},{'(',0},{')',0},{'->',0}|Tokens]) of
+ {ok,{function,_Lf,f,0,[{clause,_Lc,[],[],[Expr]}]}} ->
+ try normalise(Expr) of
+ Term -> {ok,Term}
+ catch
+ _:_R -> {error,{?line(Expr),?MODULE,"bad term"}}
+ end;
+ {ok,{function,_Lf,f,0,[{clause,_Lc,[],[],[_E1,E2|_Es]}]}} ->
+ {error,{?line(E2),?MODULE,"bad term"}};
+ {error,_} = Err -> Err
+ end.
+
+%% Convert between the abstract form of a term and a term.
+
+-spec normalise(AbsTerm) -> Data when
+ AbsTerm :: abstract_expr(),
+ Data :: term().
+normalise({char,_,C}) -> C;
+normalise({integer,_,I}) -> I;
+normalise({float,_,F}) -> F;
+normalise({atom,_,A}) -> A;
+normalise({string,_,S}) -> S;
+normalise({nil,_}) -> [];
+normalise({bin,_,Fs}) ->
+ {value, B, _} =
+ eval_bits:expr_grp(Fs, [],
+ fun(E, _) ->
+ {value, normalise(E), []}
+ end, [], true),
+ B;
+normalise({cons,_,Head,Tail}) ->
+ [normalise(Head)|normalise(Tail)];
+normalise({tuple,_,Args}) ->
+ list_to_tuple(normalise_list(Args));
+%% Special case for unary +/-.
+normalise({op,_,'+',{char,_,I}}) -> I;
+normalise({op,_,'+',{integer,_,I}}) -> I;
+normalise({op,_,'+',{float,_,F}}) -> F;
+normalise({op,_,'-',{char,_,I}}) -> -I; %Weird, but compatible!
+normalise({op,_,'-',{integer,_,I}}) -> -I;
+normalise({op,_,'-',{float,_,F}}) -> -F;
+normalise(X) -> erlang:error({badarg, X}).
+
+normalise_list([H|T]) ->
+ [normalise(H)|normalise_list(T)];
+normalise_list([]) ->
+ [].
+
+%% Generate a list of tokens representing the abstract term.
+
+-spec tokens(AbsTerm) -> Tokens when
+ AbsTerm :: abstract_expr(),
+ Tokens :: [token()].
+tokens(Abs) ->
+ tokens(Abs, []).
+
+-spec tokens(AbsTerm, MoreTokens) -> Tokens when
+ AbsTerm :: abstract_expr(),
+ MoreTokens :: [token()],
+ Tokens :: [token()].
+tokens({char,L,C}, More) -> [{char,L,C}|More];
+tokens({integer,L,N}, More) -> [{integer,L,N}|More];
+tokens({float,L,F}, More) -> [{float,L,F}|More];
+tokens({atom,L,A}, More) -> [{atom,L,A}|More];
+tokens({var,L,V}, More) -> [{var,L,V}|More];
+tokens({string,L,S}, More) -> [{string,L,S}|More];
+tokens({nil,L}, More) -> [{'[',L},{']',L}|More];
+tokens({cons,L,Head,Tail}, More) ->
+ [{'[',L}|tokens(Head, tokens_tail(Tail, More))];
+tokens({tuple,L,[]}, More) ->
+ [{'{',L},{'}',L}|More];
+tokens({tuple,L,[E|Es]}, More) ->
+ [{'{',L}|tokens(E, tokens_tuple(Es, ?line(E), More))].
+
+tokens_tail({cons,L,Head,Tail}, More) ->
+ [{',',L}|tokens(Head, tokens_tail(Tail, More))];
+tokens_tail({nil,L}, More) ->
+ [{']',L}|More];
+tokens_tail(Other, More) ->
+ L = ?line(Other),
+ [{'|',L}|tokens(Other, [{']',L}|More])].
+
+tokens_tuple([E|Es], Line, More) ->
+ [{',',Line}|tokens(E, tokens_tuple(Es, ?line(E), More))];
+tokens_tuple([], Line, More) ->
+ [{'}',Line}|More].
+
+%% Give the relative precedences of operators.
+
+inop_prec('=') -> {150,100,100};
+inop_prec('!') -> {150,100,100};
+inop_prec('orelse') -> {160,150,150};
+inop_prec('andalso') -> {200,160,160};
+inop_prec('==') -> {300,200,300};
+inop_prec('/=') -> {300,200,300};
+inop_prec('=<') -> {300,200,300};
+inop_prec('<') -> {300,200,300};
+inop_prec('>=') -> {300,200,300};
+inop_prec('>') -> {300,200,300};
+inop_prec('=:=') -> {300,200,300};
+inop_prec('=/=') -> {300,200,300};
+inop_prec('++') -> {400,300,300};
+inop_prec('--') -> {400,300,300};
+inop_prec('+') -> {400,400,500};
+inop_prec('-') -> {400,400,500};
+inop_prec('bor') -> {400,400,500};
+inop_prec('bxor') -> {400,400,500};
+inop_prec('bsl') -> {400,400,500};
+inop_prec('bsr') -> {400,400,500};
+inop_prec('or') -> {400,400,500};
+inop_prec('xor') -> {400,400,500};
+inop_prec('*') -> {500,500,600};
+inop_prec('/') -> {500,500,600};
+inop_prec('div') -> {500,500,600};
+inop_prec('rem') -> {500,500,600};
+inop_prec('band') -> {500,500,600};
+inop_prec('and') -> {500,500,600};
+inop_prec('#') -> {800,700,800};
+inop_prec(':') -> {900,800,900};
+inop_prec('.') -> {900,900,1000}.
+
+-type pre_op() :: 'catch' | '+' | '-' | 'bnot' | 'not' | '#'.
+
+-spec preop_prec(pre_op()) -> {0 | 600 | 700, 100 | 700 | 800}.
+
+preop_prec('catch') -> {0,100};
+preop_prec('+') -> {600,700};
+preop_prec('-') -> {600,700};
+preop_prec('bnot') -> {600,700};
+preop_prec('not') -> {600,700};
+preop_prec('#') -> {700,800}.
+
+-spec func_prec() -> {800,700}.
+
+func_prec() -> {800,700}.
+
+-spec max_prec() -> 1000.
+
+max_prec() -> 1000.
+
+parse(T) ->
+ bar:foo(T).
diff --git a/lib/dialyzer/test/small_SUITE_data/src/blame_contract_range_suppressed.erl b/lib/dialyzer/test/small_SUITE_data/src/blame_contract_range_suppressed.erl
new file mode 100644
index 0000000000..8b66d35083
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/blame_contract_range_suppressed.erl
@@ -0,0 +1,15 @@
+%%-----------------------------------------------------------------------
+%% Like ./blame_contract_range.erl, but warning is suppressed.
+%%-----------------------------------------------------------------------
+-module(blame_contract_range_suppressed).
+
+-export([foo/0]).
+
+foo() ->
+ bar(b).
+
+-dialyzer({nowarn_function, bar/1}).
+
+-spec bar(atom()) -> a.
+bar(a) -> a;
+bar(b) -> b.
diff --git a/lib/dialyzer/test/small_SUITE_data/src/contracts_with_subtypes.erl b/lib/dialyzer/test/small_SUITE_data/src/contracts_with_subtypes.erl
index d7dfd9752e..dbabd904c2 100644
--- a/lib/dialyzer/test/small_SUITE_data/src/contracts_with_subtypes.erl
+++ b/lib/dialyzer/test/small_SUITE_data/src/contracts_with_subtypes.erl
@@ -136,10 +136,14 @@ q(ab) -> rec2({a, b}); % breaks the contract
q(ba) -> rec2({b, a}); % breaks the contract
q(aba) -> rec2({a, {b, a}}); % breaks the contract
q(bab) -> rec2({b, {a, b}}); % breaks the contract
-q(abab) -> rec2({a, {b, {a, b}}});
-q(baba) -> rec2({b, {a, {b, a}}});
-q(ababa) -> rec2({a, {b, {a, {b, a}}}});
-q(babab) -> rec2({b, {a, {b, {a, b}}}}).
+q(abab) -> rec2({a, {b, {a, b}}}); % breaks the contract
+q(baba) -> rec2({b, {a, {b, a}}}); % breaks the contract
+q(ababa) -> rec2({a, {b, {a, {b, a}}}}); % breaks the contract
+q(babab) -> rec2({b, {a, {b, {a, b}}}}); % breaks the contract
+q(ababab) -> rec2({a, {b, {a, {b, {a, b}}}}});
+q(bababa) -> rec2({b, {a, {b, {a, {b, a}}}}});
+q(abababa) -> rec2({a, {b, {a, {b, {a, {b, a}}}}}});
+q(bababab) -> rec2({b, {a, {b, {a, {b, {a, b}}}}}}).
%===============================================================================
diff --git a/lib/dialyzer/test/small_SUITE_data/src/ditrap.erl b/lib/dialyzer/test/small_SUITE_data/src/ditrap.erl
new file mode 100644
index 0000000000..2d75f25bd5
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/ditrap.erl
@@ -0,0 +1,47 @@
+%% A bug reported by Tail-f Systems. The problem is that record types
+%% are included without properly limiting their depth.
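+%% The records below refer to each other recursively: #type{} via
+%% typedef_rec(), #typedefs{} via its parent field, #sn{} and #module{}
+%% via their children fields. Unlimited expansion of such records is
+%% what triggered the problem.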
+
+-module(ditrap).
+
+-define(tref(T), ?MODULE:T).
+-define(fref(T), ?MODULE:T).
+
+-export_type([ module_rec/0
+ , typedef_rec/0
+ , type_spec_fun/0
+ ]).
+
+-record(type, {
+ base :: 'builtin' | external:random_type() | ?tref(typedef_rec()),
+ type_spec_fun :: ?fref(type_spec_fun())
+ }).
+
+-record(typedef, {type :: #type{}}).
+
+-record(typedefs, {
+ map :: ?tref(typedef_rec()),
+ parent :: 'undefined' | #typedefs{}
+ }).
+
+-record(sn, {
+ module :: ?tref(module_rec()),
+ typedefs :: #typedefs{},
+ type :: 'undefined' | #type{},
+ keys :: 'undefined' | [#sn{}],
+ children = [] :: [#sn{}]
+ }).
+
+-record(augment, {children = [] :: [#sn{}]}).
+
+-record(module, {
+ submodules = [] :: [{#module{}, external:pos()}],
+ typedefs = #typedefs{} :: #typedefs{},
+ children = [] :: [#sn{}],
+ remote_augments = [] :: [{ModuleName :: atom(), [#augment{}]}],
+ local_augments = [] :: [#augment{}]
+ }).
+
+-type typedef_rec() :: #typedef{}.
+-type module_rec() :: #module{}.
+
+-type type_spec_fun() :: undefined | fun((#type{}, #module{}) -> any()).
diff --git a/lib/dialyzer/test/small_SUITE_data/src/maps_redef2.erl b/lib/dialyzer/test/small_SUITE_data/src/maps_redef2.erl
new file mode 100644
index 0000000000..945b2a9144
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/maps_redef2.erl
@@ -0,0 +1,23 @@
+%% In 17, the linter says that map(A) redefines 'type map', which is
+%% allowed until next release. However, Dialyzer used to replace
+%% map(A) with #{}, which resulted in warnings.
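+%% Here map(_A) is an alias for integer(), so the arithmetic in t1/1
+%% below is consistent with the map(_) specs.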
+
+-module(maps_redef2).
+
+-export([t/0]).
+
+-type map(_A) :: integer().
+
+t() ->
+ M = new(),
+ t1(M).
+
+-spec t1(map(_)) -> map(_).
+
+t1(A) ->
+ A + A.
+
+-spec new() -> map(_).
+
+new() ->
+ 3.
diff --git a/lib/dialyzer/test/small_SUITE_data/src/request1.erl b/lib/dialyzer/test/small_SUITE_data/src/request1.erl
new file mode 100644
index 0000000000..a6c4ab8dbd
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/request1.erl
@@ -0,0 +1,12 @@
+-module(request1).
+
+-export([a/0]).
+
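+%% Request the unmatched_returns warnings for this module; the
+%% unmatched call to b/0 in a/0 is then reported.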
+-dialyzer(unmatched_returns).
+
+a() ->
+ b(),
+ 1.
+
+b() ->
+ {a, b}.
diff --git a/lib/dialyzer/test/small_SUITE_data/src/suppress_request.erl b/lib/dialyzer/test/small_SUITE_data/src/suppress_request.erl
new file mode 100644
index 0000000000..c4275fa110
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/suppress_request.erl
@@ -0,0 +1,50 @@
+-module(suppress_request).
+
+-export([test1/1, test1_b/1, test2/0, test2_b/0,
+ test3/0, test3_b/0, test4/0, test4_b/0]).
+
+-dialyzer({[specdiffs], test1/1}).
+-spec test1(a | b) -> ok. % spec is subtype
+test1(A) ->
+ ok = test1_1(A).
+
+-spec test1_b(a | b) -> ok. % spec is subtype (suppressed by default)
+test1_b(A) ->
+ ok = test1_1(A).
+
+-spec test1_1(a | b | c) -> ok.
+test1_1(_) ->
+ ok.
+
+-dialyzer(unmatched_returns).
+test2() ->
+ tuple(), % unmatched
+ ok.
+
+test2_b() ->
+ tuple(), % unmatched
+ ok.
+
+-dialyzer({[no_return, no_match], [test3/0]}).
+test3() -> % no local return (suppressed)
+ A = fun(_) ->
+ 1
+ end,
+ A = 2. % can never succeed (suppressed)
+
+test3_b() -> % no local return (requested by default)
+ A = fun(_) ->
+ 1
+ end,
+ A = 2. % can never succeed (requested by default)
+
+-dialyzer(no_improper_lists).
+test4() ->
+ [1 | 2]. % improper list (suppressed)
+
+-dialyzer({no_improper_lists, test4_b/0}).
+test4_b() ->
+ [1 | 2]. % improper list (suppressed)
+
+tuple() ->
+ {a, b}.
diff --git a/lib/dialyzer/test/small_SUITE_data/src/suppression1.erl b/lib/dialyzer/test/small_SUITE_data/src/suppression1.erl
new file mode 100644
index 0000000000..00534704c3
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/suppression1.erl
@@ -0,0 +1,33 @@
+-module(suppression1).
+
+-export([a/1, b/1, c/0]).
+
+-dialyzer({nowarn_function, a/1}).
+
+-spec a(_) -> integer().
+
+a(_) ->
+ A = fun(_) ->
+ B = fun(_) ->
+ x = 7
+ end,
+ B = 1
+ end,
+ A.
+
+-spec b(_) -> integer().
+
+-dialyzer({nowarn_function, b/1}).
+
+b(_) ->
+ A = fun(_) ->
+ 1
+ end,
+ A = 2.
+
+-record(r, {a = a :: integer()}).
+
+-dialyzer({nowarn_function, c/0}).
+
+c() ->
+ #r{}.
diff --git a/lib/dialyzer/test/small_SUITE_data/src/suppression2.erl b/lib/dialyzer/test/small_SUITE_data/src/suppression2.erl
new file mode 100644
index 0000000000..4cba53fdce
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/suppression2.erl
@@ -0,0 +1,32 @@
+-module(suppression2).
+
+-export([a/1, b/1, c/0]).
+
+-dialyzer({nowarn_function, [a/1, b/1, c/0]}).
+-dialyzer([no_undefined_callbacks]).
+
+-behaviour(not_a_behaviour).
+
+-spec a(_) -> integer().
+
+a(_) ->
+ A = fun(_) ->
+ B = fun(_) ->
+ x = 7
+ end,
+ B = 1
+ end,
+ A.
+
+-spec b(_) -> integer().
+
+b(_) ->
+ A = fun(_) ->
+ 1
+ end,
+ A = 2.
+
+-record(r, {a = a :: integer()}).
+
+c() ->
+ #r{}.
diff --git a/lib/diameter/doc/src/diameter.xml b/lib/diameter/doc/src/diameter.xml
index 00b54ffbc4..638c1c4c2b 100644
--- a/lib/diameter/doc/src/diameter.xml
+++ b/lib/diameter/doc/src/diameter.xml
@@ -21,7 +21,7 @@
<copyright>
<year>2011</year>
-<year>2014</year>
+<year>2015</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -111,7 +111,7 @@ Defined in &dict_data_types;.</p>
<tag><c>application_alias() = term()</c></tag>
<item>
<p>
-A name identifying a Diameter application in
+Name identifying a Diameter application in
service configuration.
Passed to &call; when sending requests
defined by the application.</p>
@@ -129,7 +129,7 @@ ExtraArgs = list()
</pre>
<p>
-A module implementing the callback interface defined in &man_app;,
+Module implementing the callback interface defined in &man_app;,
along with any
extra arguments to be appended to those documented.
Note that extra arguments specific to an outgoing request can be
@@ -156,7 +156,7 @@ Has one the following types.</p>
<tag><c>{alias, &application_alias;}</c></tag>
<item>
<p>
-A unique identifier for the application in the scope of the
+Unique identifier for the application in the scope of the
service.
Defaults to the value of the <c>dictionary</c> option if
unspecified.</p>
@@ -165,7 +165,7 @@ unspecified.</p>
<tag><c>{dictionary, atom()}</c></tag>
<item>
<p>
-The name of an encode/decode module for the Diameter
+Name of an encode/decode module for the Diameter
messages defined by the application.
These modules are generated from files whose format is documented in
&man_dict;.</p>
@@ -174,7 +174,7 @@ These modules are generated from files whose format is documented in
<tag><c>{module, &application_module;}</c></tag>
<item>
<p>
-The callback module with which messages of the Diameter application are
+Callback module in which messages of the Diameter application are
handled.
See &man_app; for the required interface and semantics.</p>
</item>
@@ -182,7 +182,7 @@ See &man_app; for the required interface and semantics.</p>
<tag><c>{state, term()}</c></tag>
<item>
<p>
-The initial callback state.
+Initial callback state.
The prevailing state is passed to some
&man_app;
callbacks, which can then return a new state.
@@ -192,7 +192,7 @@ Defaults to the value of the <c>alias</c> option if unspecified.</p>
<tag><c>{call_mutates_state, true|false}</c></tag>
<item>
<p>
-Specifies whether or not the &app_pick_peer;
+Whether or not the &app_pick_peer;
application callback can modify the application state.
Defaults to <c>false</c> if unspecified.</p>
@@ -209,7 +209,7 @@ probably avoid it.</p>
<tag><c>{answer_errors, callback|report|discard}</c></tag>
<item>
<p>
-Determines the manner in which incoming answer messages containing
+Manner in which incoming answer messages containing
decode errors are handled.</p>
<p>
@@ -233,7 +233,7 @@ Defaults to <c>discard</c> if unspecified.</p>
<tag><c>{request_errors, answer_3xxx|answer|callback}</c></tag>
<item>
<p>
-Determines the manner in which incoming requests are handled when an
+Manner in which incoming requests are handled when an
error other than 3007 (DIAMETER_APPLICATION_UNSUPPORTED, which cannot
be associated with an application callback module), is detected.</p>
@@ -293,7 +293,7 @@ Multiple options append to the argument list.</p>
<tag><c>{filter, &peer_filter;}</c></tag>
<item>
<p>
-A filter to apply to the list of available peers before passing it to
+Filter to apply to the list of available peers before passing it to
the &app_pick_peer; callback for the application in question.
Multiple options are equivalent a single <c>all</c> filter on the
corresponding list of filters.
@@ -311,7 +311,7 @@ Defaults to 5000.</p>
<tag><c>detach</c></tag>
<item>
<p>
-Causes &call; to return <c>ok</c> as
+Cause &call; to return <c>ok</c> as
soon as the request in
question has been encoded, instead of waiting for and returning
the result from a subsequent &app_handle_answer; or
@@ -427,7 +427,7 @@ configuration passed to &start_service; or &add_transport;.</p>
<tag><c>peer_filter() = term()</c></tag>
<item>
<p>
-A filter passed to &call; in order to select candidate peers for a
+Filter passed to &call; in order to select candidate peers for a
&app_pick_peer; callback.
Has one of the following types.</p>
@@ -1032,7 +1032,7 @@ case the corresponding callbacks are applied until either all return
<tag><c>{capx_timeout, &dict_Unsigned32;}</c></tag>
<item>
<p>
-The number of milliseconds after which a transport process having an
+Number of milliseconds after which a transport process having an
established transport connection will be terminated if the expected
capabilities exchange message (CER or CEA) is not received from the peer.
For a connecting transport, the timing of connection attempts is
@@ -1079,7 +1079,7 @@ transport.</p>
<item>
<p>
-A callback invoked prior to terminating the transport process of a
+Callback invoked prior to terminating the transport process of a
transport connection having watchdog state <c>OKAY</c>.
Applied to <c>application|service|transport</c> and the
<c>&transport_ref;</c> and <c>&app_peer;</c> in question:
@@ -1095,7 +1095,7 @@ The return value can have one of the following types.</p>
<tag><c>{dpr, [option()]}</c></tag>
<item>
<p>
-Causes Disconnect-Peer-Request to be sent to the peer, the transport
+Send Disconnect-Peer-Request to the peer, the transport
process being terminated following reception of
Disconnect-Peer-Answer or timeout.
An <c>option()</c> can be one of the following.</p>
@@ -1104,7 +1104,7 @@ An <c>option()</c> can be one of the following.</p>
<tag><c>{cause, 0|rebooting|1|busy|2|goaway}</c></tag>
<item>
<p>
-The Disconnect-Cause to send, <c>REBOOTING</c>, <c>BUSY</c> and
+Disconnect-Cause to send, <c>REBOOTING</c>, <c>BUSY</c> and
<c>DO_NOT_WANT_TO_TALK_TO_YOU</c> respectively.
Defaults to <c>rebooting</c> for <c>Reason=service|application</c> and
<c>goaway</c> for <c>Reason=transport</c>.</p>
@@ -1113,7 +1113,7 @@ Defaults to <c>rebooting</c> for <c>Reason=service|application</c> and
<tag><c>{timeout, &dict_Unsigned32;}</c></tag>
<item>
<p>
-The number of milliseconds after which the transport process is
+Number of milliseconds after which the transport process is
terminated if DPA has not been received.
Defaults to 1000.</p>
</item>
@@ -1129,7 +1129,7 @@ Equivalent to <c>{dpr, []}</c>.</p>
<tag><c>close</c></tag>
<item>
<p>
-Causes the transport process to be terminated without
+Terminate the transport process without
Disconnect-Peer-Request being sent to the peer.</p>
</item>
@@ -1156,7 +1156,7 @@ Defaults to a single callback returning <c>dpr</c>.</p>
<tag><c>{length_errors, exit|handle|discard}</c></tag>
<item>
<p>
-Specifies how to deal with errors in the Message Length field of the
+How to deal with errors in the Message Length field of the
Diameter Header in an incoming message.
An error in this context is that the length is not at least 20 bytes
(the length of a Header), is not a multiple of 4 (a valid length) or
@@ -1188,11 +1188,26 @@ See &man_tcp; for the behaviour of that module.</p>
</note>
</item>
+<tag><c>{pool_size, pos_integer()}</c></tag>
+<item>
+<p>
+Number of transport processes to start.
+For a listening transport, determines the size of the pool of
+accepting transport processes, a larger number being desirable for
+processing multiple concurrent peer connection attempts.
+For a connecting transport, determines the number of connections to
+the peer in question that will be attempted to be established:
+the &service_opt; <c>restrict_connections</c> should also be
+configured on the service in question to allow multiple connections to
+the same peer.</p>
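+
+<p>
+For example, <c>{pool_size, 10}</c> on a listening transport starts a
+pool of 10 accepting transport processes.</p>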
+
+</item>
+
<marker id="spawn_opt"/>
<tag><c>{spawn_opt, [term()]}</c></tag>
<item>
<p>
-An options list passed to &spawn_opt; when spawning a process for an
+Options list passed to &spawn_opt; when spawning a process for an
incoming Diameter request.
Options <c>monitor</c> and <c>link</c> are ignored.</p>
@@ -1205,7 +1220,7 @@ Defaults to the list configured on the service if not specified.</p>
<tag><c>{transport_config, term(), &dict_Unsigned32; | infinity}</c></tag>
<item>
<p>
-A term passed as the third argument to the &transport_start; function of
+Term passed as the third argument to the &transport_start; function of
the relevant &transport_module; in order to
start a transport process.
Defaults to the empty list if unspecified.</p>
@@ -1233,7 +1248,7 @@ To listen on both SCTP and TCP, define one transport for each.</p>
<tag><c>{transport_module, atom()}</c></tag>
<item>
<p>
-A module implementing a transport process as defined in &man_transport;.
+Module implementing a transport process as defined in &man_transport;.
Defaults to <c>diameter_tcp</c> if unspecified.</p>
<p>
@@ -1253,7 +1268,7 @@ corresponding timeout (see below) or all fail.</p>
<tag><c>{watchdog_config, [{okay|suspect, non_neg_integer()}]}</c></tag>
<item>
<p>
-Specifies configuration that alters the behaviour of the watchdog
+Configuration that alters the behaviour of the watchdog
state machine.
On key <c>okay</c>, the non-negative number of answered DWR
messages before transitioning from REOPEN to OKAY.
@@ -1308,7 +1323,7 @@ in predicate functions passed to &remove_transport;.</p>
<tag><c>transport_ref() = reference()</c></tag>
<item>
<p>
-An reference returned by &add_transport; that
+Reference returned by &add_transport; that
identifies the configuration.</p>
</item>
@@ -1737,6 +1752,14 @@ connection might look as follows.</p>
The information presented here is as in the <c>connect</c> case except
that the client connections are grouped under an <c>accept</c> tuple.</p>
+<p>
+The &transport_opt; <c>pool_size</c> affects the format
+of the listing in the case of a connecting transport, since a value
+greater than 1 implies multiple transport processes for the same
+<c>&transport_ref;</c>, as in the listening case.
+The format in this case is similar to the listening case, with a
+<c>pool</c> tuple in place of an <c>accept</c> tuple.</p>
+
</item>
<tag><c>connections</c></tag>
diff --git a/lib/diameter/examples/code/GNUmakefile b/lib/diameter/examples/code/GNUmakefile
index 98e36a99e3..81f1da5a39 100644
--- a/lib/diameter/examples/code/GNUmakefile
+++ b/lib/diameter/examples/code/GNUmakefile
@@ -1,7 +1,7 @@
#
# %CopyrightBegin%
#
-# Copyright Ericsson AB 2010-2012. All Rights Reserved.
+# Copyright Ericsson AB 2010-2015. All Rights Reserved.
#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
@@ -20,7 +20,7 @@
EXAMPLES = client server relay # redirect proxy
CALLBACKS = $(EXAMPLES:%=%_cb)
-MODULES = peer $(EXAMPLES) $(EXAMPLES:%=%_cb)
+MODULES = node $(EXAMPLES) $(EXAMPLES:%=%_cb)
BEAM = $(MODULES:%=%.beam)
diff --git a/lib/diameter/examples/code/client.erl b/lib/diameter/examples/code/client.erl
index 46eb4a55db..be5b4cbba5 100644
--- a/lib/diameter/examples/code/client.erl
+++ b/lib/diameter/examples/code/client.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -38,7 +38,7 @@
-module(client).
-include_lib("diameter/include/diameter.hrl").
--include_lib("diameter/include/diameter_gen_base_rfc3588.hrl").
+-include_lib("diameter/include/diameter_gen_base_rfc6733.hrl").
-export([start/1, %% start a service
connect/2, %% add a connecting transport
@@ -50,17 +50,14 @@
%% both the record and list encoding here, one detached and one not,
%% is just for demonstration purposes.
-%% Convenience functions using the default service name, ?SVC_NAME.
+%% Convenience functions using the default service name.
-export([start/0,
connect/1,
stop/0,
call/0,
cast/0]).
--define(SVC_NAME, ?MODULE).
--define(APP_ALIAS, ?MODULE).
--define(CALLBACK_MOD, client_cb).
-
+-define(DEF_SVC_NAME, ?MODULE).
-define(L, atom_to_list).
%% The service configuration. As in the server example, a client
@@ -70,27 +67,27 @@
{'Origin-Realm', "example.com"},
{'Vendor-Id', 0},
{'Product-Name', "Client"},
- {'Auth-Application-Id', [?DIAMETER_APP_ID_COMMON]},
- {application, [{alias, ?APP_ALIAS},
- {dictionary, ?DIAMETER_DICT_COMMON},
- {module, ?CALLBACK_MOD}]}]).
+ {'Auth-Application-Id', [0]},
+ {application, [{alias, common},
+ {dictionary, diameter_gen_base_rfc6733},
+ {module, client_cb}]}]).
%% start/1
start(Name)
when is_atom(Name) ->
- peer:start(Name, ?SERVICE(Name)).
+ node:start(Name, ?SERVICE(Name)).
start() ->
- start(?SVC_NAME).
+ start(?DEF_SVC_NAME).
%% connect/2
connect(Name, T) ->
- peer:connect(Name, T).
+ node:connect(Name, T).
connect(T) ->
- connect(?SVC_NAME, T).
+ connect(?DEF_SVC_NAME, T).
%% call/1
@@ -99,10 +96,10 @@ call(Name) ->
RAR = #diameter_base_RAR{'Session-Id' = SId,
'Auth-Application-Id' = 0,
'Re-Auth-Request-Type' = 0},
- diameter:call(Name, ?APP_ALIAS, RAR, []).
+ diameter:call(Name, common, RAR, []).
call() ->
- call(?SVC_NAME).
+ call(?DEF_SVC_NAME).
%% cast/1
@@ -111,15 +108,15 @@ cast(Name) ->
RAR = ['RAR', {'Session-Id', SId},
{'Auth-Application-Id', 0},
{'Re-Auth-Request-Type', 1}],
- diameter:call(Name, ?APP_ALIAS, RAR, [detach]).
+ diameter:call(Name, common, RAR, [detach]).
cast() ->
- cast(?SVC_NAME).
+ cast(?DEF_SVC_NAME).
%% stop/1
stop(Name) ->
- peer:stop(Name).
+ node:stop(Name).
stop() ->
- stop(?SVC_NAME).
+ stop(?DEF_SVC_NAME).
diff --git a/lib/diameter/examples/code/node.erl b/lib/diameter/examples/code/node.erl
new file mode 100644
index 0000000000..4fe9007059
--- /dev/null
+++ b/lib/diameter/examples/code/node.erl
@@ -0,0 +1,174 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+%% A library module used by the example Diameter nodes. Does little
+%% more than provide an alternate/simplified transport configuration.
+%%
+
+-module(node).
+
+-export([start/2,
+ listen/2,
+ connect/2,
+ stop/1]).
+
+-type protocol()
+ :: tcp | sctp.
+
+-type ip_address()
+ :: default
+ | inet:ip_address().
+
+-type server_transport()
+ :: protocol()
+ | {protocol(), ip_address(), non_neg_integer()}.
+
+-type server_opts()
+ :: server_transport()
+ | {server_transport(), [diameter:transport_opt()]}
+ | [diameter:transport_opt()].
+
+-type client_transport()
+ :: protocol() | any
+ | {protocol() | any, ip_address(), non_neg_integer()}
+ | {protocol() | any, ip_address(), ip_address(), non_neg_integer()}.
+
+-type client_opts()
+ :: client_transport()
+ | {client_transport(), [diameter:transport_opt()]}
+ | [diameter:transport_opt()].
+
+%% The server_transport() and client_transport() config is just
+%% convenience: arbitrary options can be specified as a
+%% [diameter:transport_opt()].
+
+-define(DEFAULT_PORT, 3868).
+
+%% ---------------------------------------------------------------------------
+%% Interface functions
+%% ---------------------------------------------------------------------------
+
+%% start/2
+
+-spec start(diameter:service_name(), [diameter:service_opt()])
+ -> ok
+ | {error, term()}.
+
+start(Name, Opts)
+ when is_atom(Name), is_list(Opts) ->
+ diameter:start_service(Name, Opts).
+
+%% connect/2
+
+-spec connect(diameter:service_name(), client_opts())
+ -> {ok, diameter:transport_ref()}
+ | {error, term()}.
+
+connect(Name, Opts)
+ when is_list(Opts) ->
+ diameter:add_transport(Name, {connect, Opts});
+
+connect(Name, {T, Opts}) ->
+ connect(Name, Opts ++ client_opts(T));
+
+connect(Name, T) ->
+ connect(Name, [{connect_timer, 5000} | client_opts(T)]).
+
+%% listen/2
+
+-spec listen(diameter:service_name(), server_opts())
+ -> {ok, diameter:transport_ref()}
+ | {error, term()}.
+
+listen(Name, Opts)
+ when is_list(Opts) ->
+ diameter:add_transport(Name, {listen, Opts});
+
+listen(Name, {T, Opts}) ->
+ listen(Name, Opts ++ server_opts(T));
+
+listen(Name, T) ->
+ listen(Name, server_opts(T)).
+
+%% stop/1
+
+-spec stop(diameter:service_name())
+ -> ok
+ | {error, term()}.
+
+stop(Name) ->
+ diameter:stop_service(Name).
+
+%% ---------------------------------------------------------------------------
+%% Internal functions
+%% ---------------------------------------------------------------------------
+
+%% server_opts/1
+%%
+%% Return transport options for a listening transport.
+
+server_opts({T, Addr, Port}) ->
+ [{transport_module, tmod(T)},
+ {transport_config, [{reuseaddr, true},
+ {ip, addr(Addr)},
+ {port, Port}]}];
+
+server_opts(T) ->
+ server_opts({T, loopback, ?DEFAULT_PORT}).
+
+%% client_opts/1
+%%
+%% Return transport options for a connecting transport.
+
+client_opts({T, LA, RA, RP})
+ when T == all; %% backwards compatibility
+ T == any ->
+ [[S, {C,Os}], T] = [client_opts({P, LA, RA, RP}) || P <- [sctp,tcp]],
+ [S, {C,Os,2000} | T];
+
+client_opts({T, LA, RA, RP}) ->
+ [{transport_module, tmod(T)},
+ {transport_config, [{raddr, addr(RA)},
+ {rport, RP},
+ {reuseaddr, true}
+ | ip(LA)]}];
+
+client_opts({T, RA, RP}) ->
+ client_opts({T, default, RA, RP});
+
+client_opts(T) ->
+ client_opts({T, loopback, loopback, ?DEFAULT_PORT}).
+
+%% ---------------------------------------------------------------------------
+
+tmod(tcp) -> diameter_tcp;
+tmod(sctp) -> diameter_sctp.
+
+ip(default) ->
+ [];
+ip(loopback) ->
+ [{ip, {127,0,0,1}}];
+ip(Addr) ->
+ [{ip, Addr}].
+
+addr(loopback) ->
+ {127,0,0,1};
+addr(A) ->
+ A.
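As a rough sketch of how the rewritten examples fit together, assuming the default loopback configuration provided by node.erl above:

    server:start(),       %% start the server service
    server:listen(tcp),   %% listen on 127.0.0.1:3868 via diameter_tcp
    client:start(),       %% start the client service
    client:connect(tcp),  %% add a connecting transport towards the listener
    client:call().        %% send a RAR over the common application, await the answer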
diff --git a/lib/diameter/examples/code/peer.erl b/lib/diameter/examples/code/peer.erl
deleted file mode 100644
index 7519abfb2c..0000000000
--- a/lib/diameter/examples/code/peer.erl
+++ /dev/null
@@ -1,150 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2010-2013. All Rights Reserved.
-%%
-%% The contents of this file are subject to the Erlang Public License,
-%% Version 1.1, (the "License"); you may not use this file except in
-%% compliance with the License. You should have received a copy of the
-%% Erlang Public License along with this software. If not, it can be
-%% retrieved online at http://www.erlang.org/.
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and limitations
-%% under the License.
-%%
-%% %CopyrightEnd%
-%%
-
-%%
-%% A library module that factors out commonality in the example
-%% Diameter peers.
-%%
-
--module(peer).
-
--include_lib("diameter/include/diameter.hrl").
--include_lib("diameter/include/diameter_gen_base_rfc3588.hrl").
-
--export([start/2,
- listen/2,
- connect/2,
- stop/1]).
-
--type service_name()
- :: term().
-
--type protocol()
- :: tcp | sctp.
-
--type ip_address()
- :: default
- | inet:ip_address().
-
--type server_config()
- :: protocol()
- | {protocol(), ip_address(), non_neg_integer()}.
-
--type client_config()
- :: protocol()
- | {protocol(), ip_address(), non_neg_integer()}
- | {protocol(), ip_address(), ip_address(), non_neg_integer()}.
-
--define(DEFAULT_PORT, 3868).
-
-%% ---------------------------------------------------------------------------
-%% Interface functions
-%% ---------------------------------------------------------------------------
-
-%% start/2
-
--spec start(service_name(), list())
- -> ok
- | {error, term()}.
-
-start(Name, Opts)
- when is_atom(Name), is_list(Opts) ->
- diameter:start_service(Name, Opts).
-
-%% connect/2
-
--spec connect(service_name(), client_config())
- -> {ok, reference()}
- | {error, term()}.
-
-connect(Name, T) ->
- diameter:add_transport(Name, {connect, [{connect_timer, 5000}
- | client(T)]}).
-
-%% listen/2
-
--spec listen(service_name(), server_config())
- -> {ok, reference()}
- | {error, term()}.
-
-listen(Name, T) ->
- diameter:add_transport(Name, {listen, server(T)}).
-
-%% stop/1
-
--spec stop(service_name())
- -> ok
- | {error, term()}.
-
-stop(Name) ->
- diameter:stop_service(Name).
-
-%% ---------------------------------------------------------------------------
-%% Internal functions
-%% ---------------------------------------------------------------------------
-
-%% server/1
-%%
-%% Return config for a listening transport.
-
-server({T, Addr, Port}) ->
- [{transport_module, tmod(T)},
- {transport_config, [{reuseaddr, true},
- {ip, addr(Addr)},
- {port, Port}]}];
-
-server(T) ->
- server({T, loopback, ?DEFAULT_PORT}).
-
-%% client/1
-%%
-%% Return config for a connecting transport.
-
-client({all, LA, RA, RP}) ->
- [[M,{K,C}], T]
- = [client({P, LA, RA, RP}) || P <- [sctp,tcp]],
- [M, {K,C,2000} | T];
-
-client({T, LA, RA, RP}) ->
- [{transport_module, tmod(T)},
- {transport_config, [{raddr, addr(RA)},
- {rport, RP},
- {reuseaddr, true}
- | ip(LA)]}];
-
-client({T, RA, RP}) ->
- client({T, default, RA, RP});
-
-client(T) ->
- client({T, loopback, loopback, ?DEFAULT_PORT}).
-
-tmod(tcp) -> diameter_tcp;
-tmod(sctp) -> diameter_sctp.
-
-ip(default) ->
- [];
-ip(loopback) ->
- [{ip, {127,0,0,1}}];
-ip(Addr) ->
- [{ip, Addr}].
-
-addr(loopback) ->
- {127,0,0,1};
-addr(A) ->
- A.
diff --git a/lib/diameter/examples/code/relay.erl b/lib/diameter/examples/code/relay.erl
index d3438f83f3..0aa3cd06d3 100644
--- a/lib/diameter/examples/code/relay.erl
+++ b/lib/diameter/examples/code/relay.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2012. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -31,9 +31,6 @@
-module(relay).
--include_lib("diameter/include/diameter.hrl").
--include_lib("diameter/include/diameter_gen_base_rfc3588.hrl").
-
-export([start/1,
listen/2,
connect/2,
@@ -44,49 +41,47 @@
connect/1,
stop/0]).
--define(APP_ALIAS, ?MODULE).
--define(SVC_NAME, ?MODULE).
--define(CALLBACK_MOD, relay_cb).
+-define(DEF_SVC_NAME, ?MODULE).
%% The service configuration.
-define(SERVICE(Name), [{'Origin-Host', atom_to_list(Name) ++ ".example.com"},
{'Origin-Realm', "example.com"},
{'Vendor-Id', 193},
{'Product-Name', "RelayAgent"},
- {'Auth-Application-Id', [?DIAMETER_APP_ID_RELAY]},
- {application, [{alias, ?MODULE},
- {dictionary, ?DIAMETER_DICT_RELAY},
- {module, ?CALLBACK_MOD}]}]).
+ {'Auth-Application-Id', [16#FFFFFFFF]},
+ {application, [{alias, relay},
+ {dictionary, diameter_relay},
+ {module, relay_cb}]}]).
%% start/1
start(Name)
when is_atom(Name) ->
- peer:start(Name, ?SERVICE(Name)).
+ node:start(Name, ?SERVICE(Name)).
start() ->
- start(?SVC_NAME).
+ start(?DEF_SVC_NAME).
%% listen/2
listen(Name, T) ->
- peer:listen(Name, T).
+ node:listen(Name, T).
listen(T) ->
- listen(?SVC_NAME, T).
+ listen(?DEF_SVC_NAME, T).
%% connect/2
connect(Name, T) ->
- peer:connect(Name, T).
+ node:connect(Name, T).
connect(T) ->
- connect(?SVC_NAME, T).
+ connect(?DEF_SVC_NAME, T).
%% stop/1
stop(Name) ->
- peer:stop(Name).
+ node:stop(Name).
stop() ->
- stop(?SVC_NAME).
+ stop(?DEF_SVC_NAME).
diff --git a/lib/diameter/examples/code/server.erl b/lib/diameter/examples/code/server.erl
index 3959461cec..8c91e68895 100644
--- a/lib/diameter/examples/code/server.erl
+++ b/lib/diameter/examples/code/server.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2012. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -34,21 +34,16 @@
-module(server).
--include_lib("diameter/include/diameter.hrl").
--include_lib("diameter/include/diameter_gen_base_rfc3588.hrl").
-
-export([start/1, %% start a service
listen/2, %% add a listening transport
stop/1]). %% stop a service
-%% Convenience functions using the default service name, ?SVC_NAME.
+%% Convenience functions using the default service name.
-export([start/0,
listen/1,
stop/0]).
--define(SVC_NAME, ?MODULE).
--define(APP_ALIAS, ?MODULE).
--define(CALLBACK_MOD, server_cb).
+-define(DEF_SVC_NAME, ?MODULE).
%% The service configuration. In a server supporting multiple Diameter
%% applications each application may have its own, although they could all
@@ -57,32 +52,32 @@
{'Origin-Realm', "example.com"},
{'Vendor-Id', 193},
{'Product-Name', "Server"},
- {'Auth-Application-Id', [?DIAMETER_APP_ID_COMMON]},
- {application, [{alias, ?APP_ALIAS},
- {dictionary, ?DIAMETER_DICT_COMMON},
- {module, ?CALLBACK_MOD}]}]).
+ {'Auth-Application-Id', [0]},
+ {application, [{alias, common},
+ {dictionary, diameter_gen_base_rfc6733},
+ {module, server_cb}]}]).
%% start/1
start(Name)
when is_atom(Name) ->
- peer:start(Name, ?SERVICE(Name)).
+ node:start(Name, ?SERVICE(Name)).
start() ->
- start(?SVC_NAME).
+ start(?DEF_SVC_NAME).
%% listen/2
listen(Name, T) ->
- peer:listen(Name, T).
+ node:listen(Name, T).
listen(T) ->
- listen(?SVC_NAME, T).
+ listen(?DEF_SVC_NAME, T).
%% stop/1
stop(Name) ->
- peer:stop(Name).
+ node:stop(Name).
stop() ->
- stop(?SVC_NAME).
+ stop(?DEF_SVC_NAME).
diff --git a/lib/diameter/examples/code/server_cb.erl b/lib/diameter/examples/code/server_cb.erl
index 9d8d395d06..071e152493 100644
--- a/lib/diameter/examples/code/server_cb.erl
+++ b/lib/diameter/examples/code/server_cb.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -24,7 +24,7 @@
-module(server_cb).
-include_lib("diameter/include/diameter.hrl").
--include_lib("diameter/include/diameter_gen_base_rfc3588.hrl").
+-include_lib("diameter/include/diameter_gen_base_rfc6733.hrl").
%% diameter callbacks
-export([peer_up/3,
diff --git a/lib/diameter/include/diameter_gen.hrl b/lib/diameter/include/diameter_gen.hrl
index bc25f7d472..8272904856 100644
--- a/lib/diameter/include/diameter_gen.hrl
+++ b/lib/diameter/include/diameter_gen.hrl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -25,6 +25,9 @@
-define(THROW(T), throw({?MODULE, T})).
+%% Tag common to generated dictionaries.
+-define(TAG, diameter_gen).
+
%% Key to a value in the process dictionary that determines whether or
%% not an unrecognized AVP setting the M-bit should be regarded as an
%% error or not. See is_strict/0.
@@ -48,13 +51,20 @@
%% dictionary.
putr(K,V) ->
- put({?MODULE, K}, V).
+ put({?TAG, K}, V).
getr(K) ->
- get({?MODULE, K}).
+ case get({?TAG, K}) of
+ undefined ->
+ V = erase({?MODULE, K}), %% written in old code
+ V == undefined orelse putr(K,V),
+ V;
+ V ->
+ V
+ end.
eraser(K) ->
- erase({?MODULE, K}).
+ erase({?TAG, K}).
%% ---------------------------------------------------------------------------
%% # encode_avps/2
@@ -313,12 +323,20 @@ d(Name, Avp, Acc) ->
%% decode is packed into 'AVP'.
Mod = dict(Failed), %% Dictionary to decode in.
+ %% On decode, a Grouped AVP is represented as a #diameter_avp{}
+ %% list with AVP as head and component AVPs as tail. On encode,
+ %% data can be a list of component AVPs.
+
try Mod:avp(decode, Data, AvpName) of
V ->
{Avps, T} = Acc,
{H, A} = ungroup(V, Avp),
{[H | Avps], pack_avp(Name, A, T)}
catch
+ throw: {?TAG, {grouped, RC, ComponentAvps}} ->
+ {Avps, {Rec, Errors}} = Acc,
+ A = trim(Avp),
+ {[[A | trim(ComponentAvps)] | Avps], {Rec, [{RC, A} | Errors]}};
error: Reason ->
d(undefined == Failed orelse is_failed(),
Reason,
@@ -338,6 +356,10 @@ d(Name, Avp, Acc) ->
trim(#diameter_avp{data = <<0:1, Bin/binary>>} = Avp) ->
Avp#diameter_avp{data = Bin};
+trim(Avps)
+ when is_list(Avps) ->
+ lists:map(fun trim/1, Avps);
+
trim(Avp) ->
Avp.
@@ -582,22 +604,37 @@ value(_, Avp) ->
%% # grouped_avp/3
%% ---------------------------------------------------------------------------
--spec grouped_avp(decode, avp_name(), binary())
+-spec grouped_avp(decode, avp_name(), bitstring())
-> {avp_record(), [avp()]};
(encode, avp_name(), avp_record() | avp_values())
-> binary()
| no_return().
+%% Length error induced by diameter_codec:collect_avps/1.
+grouped_avp(decode, _Name, <<0:1, _/binary>>) ->
+ throw({?TAG, {grouped, 5014, []}});
+
grouped_avp(decode, Name, Data) ->
- {Rec, Avps, []} = decode_avps(Name, diameter_codec:collect_avps(Data)),
- {Rec, Avps};
-%% A failed match here will result in 5004. Note that this is the only
-%% AVP type that doesn't just return the decoded record, also
-%% returning the list of component AVP's.
+ grouped_decode(Name, diameter_codec:collect_avps(Data));
grouped_avp(encode, Name, Data) ->
encode_avps(Name, Data).
+%% grouped_decode/2
+%%
+%% Note that Grouped is the only AVP type that doesn't just return a
+%% decoded value, also returning the list of component diameter_avp
+%% records.
+
+grouped_decode(_Name, {Error, Acc}) ->
+ {RC, Avp} = Error,
+ throw({?TAG, {grouped, RC, [Avp | Acc]}});
+
+grouped_decode(Name, ComponentAvps) ->
+ {Rec, Avps, Es} = decode_avps(Name, ComponentAvps),
+ [] == Es orelse throw({?TAG, {grouped, 5004, Avps}}), %% decode failure
+ {Rec, Avps}.
+
%% ---------------------------------------------------------------------------
%% # empty_group/1
%% ---------------------------------------------------------------------------
diff --git a/lib/diameter/src/base/diameter.erl b/lib/diameter/src/base/diameter.erl
index d74e091e11..1bbdf6e34d 100644
--- a/lib/diameter/src/base/diameter.erl
+++ b/lib/diameter/src/base/diameter.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2014. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -337,6 +337,7 @@ call(SvcName, App, Message) ->
:: {transport_module, atom()}
| {transport_config, any()}
| {transport_config, any(), 'Unsigned32'() | infinity}
+ | {pool_size, pos_integer()}
| {applications, [app_alias()]}
| {capabilities, [capability()]}
| {capabilities_cb, evaluable()}
diff --git a/lib/diameter/src/base/diameter_codec.erl b/lib/diameter/src/base/diameter_codec.erl
index a2b04bfd63..b4ecb63961 100644
--- a/lib/diameter/src/base/diameter_codec.erl
+++ b/lib/diameter/src/base/diameter_codec.erl
@@ -390,6 +390,9 @@ sequence_numbers(#diameter_packet{bin = Bin})
sequence_numbers(#diameter_packet{header = #diameter_header{} = H}) ->
sequence_numbers(H);
+sequence_numbers(#diameter_packet{msg = [#diameter_header{} = H | _]}) ->
+ sequence_numbers(H);
+
sequence_numbers(#diameter_header{hop_by_hop_id = H,
end_to_end_id = E}) ->
{H,E};
@@ -561,14 +564,14 @@ split_data(Bin, Len) ->
<<Data:Len/binary, _:Pad/binary, Rest/binary>> ->
{Data, Rest};
_ ->
- %% Header length points past the end of the message. As
- %% stated in the 6733 text above, it's sufficient to
- %% return a zero-filled minimal payload if this is a
- %% request. Do this (in cases that we know the type) by
- %% inducing a decode failure and letting the dictionary's
- %% decode (in diameter_gen) deal with it. Here we don't
- %% know type. If the type isn't known, then the decode
- %% just strips the extra bit.
+ %% Header length points past the end of the message, or
+ %% doesn't span the header. As stated in the 6733 text
+ %% above, it's sufficient to return a zero-filled minimal
+ %% payload if this is a request. Do this (in cases that we
+ %% know the type) by inducing a decode failure and letting
+ %% the dictionary's decode (in diameter_gen) deal with it.
+ %% Here we don't know type. If the type isn't known, then
+ %% the decode just strips the extra bit.
{<<0:1, Bin/binary>>, <<>>}
end.
@@ -582,6 +585,8 @@ split_data(Bin, Len) ->
%% dictionary doesn't know about specific AVP's.
%% Grouped AVP whose components need packing ...
+pack_avp([#diameter_avp{} = A | Avps]) ->
+ pack_avp(A#diameter_avp{data = Avps});
pack_avp(#diameter_avp{data = [#diameter_avp{} | _] = Avps} = A) ->
pack_avp(A#diameter_avp{data = encode_avps(Avps)});
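As a sketch of what the two new clauses above accept (record shapes inferred from the clauses themselves, not from other documentation):

    %% sequence_numbers/1 now also handles a packet whose msg field is a
    %% header-led list, returning {H,E} as in the plain header case:
    %%   #diameter_packet{msg = [#diameter_header{hop_by_hop_id = H,
    %%                                            end_to_end_id = E} | _Avps]}
    %%
    %% pack_avp/1 now also accepts a Grouped AVP passed as a list with the
    %% component AVPs as tail:
    %%   [#diameter_avp{} = Grouped | ComponentAvps]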
diff --git a/lib/diameter/src/base/diameter_config.erl b/lib/diameter/src/base/diameter_config.erl
index dd1c9b73bb..c0a4f7df69 100644
--- a/lib/diameter/src/base/diameter_config.erl
+++ b/lib/diameter/src/base/diameter_config.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -35,10 +35,11 @@
%%
-module(diameter_config).
--compile({no_auto_import, [monitor/2]}).
-
-behaviour(gen_server).
+-compile({no_auto_import, [monitor/2, now/0]}).
+-import(diameter_lib, [now/0]).
+
-export([start_service/2,
stop_service/1,
add_transport/2,
@@ -554,6 +555,9 @@ opt({watchdog_config, L}) ->
opt({spawn_opt, Opts}) ->
is_list(Opts);
+opt({pool_size, N}) ->
+ is_integer(N) andalso 0 < N;
+
%% Options that we can't validate.
opt({K, _})
when K == transport_config;
diff --git a/lib/diameter/src/base/diameter_lib.erl b/lib/diameter/src/base/diameter_lib.erl
index 5b3a2063f8..d0d730f47c 100644
--- a/lib/diameter/src/base/diameter_lib.erl
+++ b/lib/diameter/src/base/diameter_lib.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -18,12 +18,18 @@
%%
-module(diameter_lib).
+-compile({no_auto_import, [now/0]}).
-export([info_report/2,
error_report/2,
warning_report/2,
+ now/0,
+ timestamp/1,
now_diff/1,
+ micro_diff/1,
+ micro_diff/2,
time/1,
+ seed/0,
eval/1,
eval_name/1,
get_stacktrace/0,
@@ -31,6 +37,8 @@
spawn_opts/2,
wait/1,
fold_tuple/3,
+ fold_n/3,
+ for_n/2,
log/4]).
%% ---------------------------------------------------------------------------
@@ -90,13 +98,50 @@ fmt(T) ->
end.
%% ---------------------------------------------------------------------------
+%% # now/0
+%% ---------------------------------------------------------------------------
+
+-type timestamp() :: {non_neg_integer(), 0..999999, 0..999999}.
+-type now() :: integer() %% monotonic time
+ | timestamp().
+
+-spec now()
+ -> now().
+
+%% Use monotonic time if it exists, fall back to erlang:now()
+%% otherwise.
+
+now() ->
+ try
+ erlang:monotonic_time()
+ catch
+ error: undef -> erlang:now()
+ end.
+
+%% ---------------------------------------------------------------------------
+%% # timestamp/1
+%% ---------------------------------------------------------------------------
+
+-spec timestamp(NowT :: now())
+ -> timestamp().
+
+timestamp({_,_,_} = T) -> %% erlang:now()
+ T;
+
+timestamp(MonoT) -> %% monotonic time
+ MicroSecs = erlang:convert_time_resolution(MonoT + erlang:time_offset(),
+ erlang:time_resolution(),
+ 1000000),
+ Secs = MicroSecs div 1000000,
+ {Secs div 1000000, Secs rem 1000000, MicroSecs rem 1000000}.
+
+%% ---------------------------------------------------------------------------
%% # now_diff/1
%% ---------------------------------------------------------------------------
--spec now_diff(NowT)
+-spec now_diff(NowT :: now())
-> {Hours, Mins, Secs, MicroSecs}
- when NowT :: {non_neg_integer(), 0..999999, 0..999999},
- Hours :: non_neg_integer(),
+ when Hours :: non_neg_integer(),
Mins :: 0..59,
Secs :: 0..59,
MicroSecs :: 0..999999.
@@ -104,8 +149,41 @@ fmt(T) ->
%% Return timer:now_diff(now(), NowT) as an {H, M, S, MicroS} tuple
%% instead of as integer microseconds.
-now_diff({_,_,_} = Time) ->
- time(timer:now_diff(now(), Time)).
+now_diff(Time) ->
+ time(micro_diff(Time)).
+
+%% ---------------------------------------------------------------------------
+%% # micro_diff/1
+%% ---------------------------------------------------------------------------
+
+-spec micro_diff(NowT :: now())
+ -> MicroSecs
+ when MicroSecs :: non_neg_integer().
+
+micro_diff({_,_,_} = T0) ->
+ timer:now_diff(erlang:now(), T0);
+
+micro_diff(T0) -> %% monotonic time
+ erlang:convert_time_resolution(erlang:monotonic_time() - T0,
+ erlang:time_resolution(),
+ 1000000).
+
+%% ---------------------------------------------------------------------------
+%% # micro_diff/2
+%% ---------------------------------------------------------------------------
+
+-spec micro_diff(T1 :: now(), T0 :: now())
+ -> MicroSecs
+ when MicroSecs :: non_neg_integer().
+
+micro_diff(T1, T0)
+ when is_integer(T1), is_integer(T0) -> %% monotonic time
+ erlang:convert_time_resolution(T1 - T0,
+ erlang:time_resolution(),
+ 1000000);
+
+micro_diff(T1, T0) -> %% at least one erlang:now()
+ timer:now_diff(timestamp(T1), timestamp(T0)).
%% ---------------------------------------------------------------------------
%% # time/1
@@ -115,7 +193,7 @@ now_diff({_,_,_} = Time) ->
-spec time(NowT | Diff)
-> {Hours, Mins, Secs, MicroSecs}
- when NowT :: {non_neg_integer(), 0..999999, 0..999999},
+ when NowT :: timestamp(),
Diff :: non_neg_integer(),
Hours :: non_neg_integer(),
Mins :: 0..59,
@@ -134,6 +212,27 @@ time(Micro) -> %% elapsed time
{H, M, S, Micro rem 1000000}.
%% ---------------------------------------------------------------------------
+%% # seed/0
+%% ---------------------------------------------------------------------------
+
+-spec seed()
+ -> {timestamp(), {integer(), integer(), integer()}}.
+
+%% Return an argument for random:seed/1.
+
+seed() ->
+ T = now(),
+ {timestamp(T), seed(T)}.
+
+%% seed/1
+
+seed({_,_,_} = T) ->
+ T;
+
+seed(T) -> %% monotonic time
+ {erlang:phash2(node()), T, erlang:unique_integer()}.
+
+%% ---------------------------------------------------------------------------
%% # eval/1
%%
%% Evaluate a function in various forms.
@@ -247,17 +346,19 @@ opts(HeapSize, Opts) ->
%% # wait/1
%% ---------------------------------------------------------------------------
--spec wait([pid()])
+-spec wait([pid() | reference()])
-> ok.
wait(L) ->
- down([erlang:monitor(process, P) || P <- L]).
+ lists:foreach(fun down/1, L).
-down([]) ->
- ok;
-down([MRef|T]) ->
- receive {'DOWN', MRef, process, _, _} -> ok end,
- down(T).
+down(Pid)
+ when is_pid(Pid) ->
+ down(monitor(process, Pid));
+
+down(MRef)
+ when is_reference(MRef) ->
+ receive {'DOWN', MRef, process, _, _} = T -> T end.
%% ---------------------------------------------------------------------------
%% # fold_tuple/3
@@ -290,6 +391,35 @@ ft(Value, {Idx, T}) ->
setelement(Idx, T, Value).
%% ---------------------------------------------------------------------------
+%% # fold_n/3
+%% ---------------------------------------------------------------------------
+
+-spec fold_n(F, Acc0, N)
+ -> term()
+ when F :: fun((non_neg_integer(), term()) -> term()),
+ Acc0 :: term(),
+ N :: non_neg_integer().
+
+fold_n(F, Acc, N)
+ when is_integer(N), 0 < N ->
+ fold_n(F, F(N, Acc), N-1);
+
+fold_n(_, Acc, _) ->
+ Acc.
+
+%% ---------------------------------------------------------------------------
+%% # for_n/2
+%% ---------------------------------------------------------------------------
+
+-spec for_n(F, N)
+ -> non_neg_integer()
+ when F :: fun((non_neg_integer()) -> term()),
+ N :: non_neg_integer().
+
+for_n(F, N) ->
+ fold_n(fun(M,A) -> F(M), A+1 end, 0, N).
+
+%% ---------------------------------------------------------------------------
%% # log/4
%%
%% Called to have something to trace on for happenings of interest.
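A minimal illustration of the new fold_n/3 and for_n/2 helpers added above; the return values follow from their definitions and the funs are arbitrary:

    %% fold_n/3 applies F to N, N-1, ..., 1, threading an accumulator:
    [1,2,3] = diameter_lib:fold_n(fun(N, Acc) -> [N | Acc] end, [], 3),

    %% for_n/2 applies F to each of N, ..., 1 and returns the count:
    3 = diameter_lib:for_n(fun(N) -> self() ! N end, 3).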
diff --git a/lib/diameter/src/base/diameter_peer.erl b/lib/diameter/src/base/diameter_peer.erl
index e5d4b28766..ea326dd03e 100644
--- a/lib/diameter/src/base/diameter_peer.erl
+++ b/lib/diameter/src/base/diameter_peer.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -18,9 +18,11 @@
%%
-module(diameter_peer).
-
-behaviour(gen_server).
+-compile({no_auto_import, [now/0]}).
+-import(diameter_lib, [now/0]).
+
%% Interface towards transport modules ...
-export([recv/2,
up/1,
diff --git a/lib/diameter/src/base/diameter_reg.erl b/lib/diameter/src/base/diameter_reg.erl
index 3197c1aee1..f785777874 100644
--- a/lib/diameter/src/base/diameter_reg.erl
+++ b/lib/diameter/src/base/diameter_reg.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -22,10 +22,11 @@
%%
-module(diameter_reg).
--compile({no_auto_import, [monitor/2]}).
-
-behaviour(gen_server).
+-compile({no_auto_import, [monitor/2, now/0]}).
+-import(diameter_lib, [now/0]).
+
-export([add/1,
add_new/1,
del/1,
diff --git a/lib/diameter/src/base/diameter_service.erl b/lib/diameter/src/base/diameter_service.erl
index 76b05a2ad4..04401a3d87 100644
--- a/lib/diameter/src/base/diameter_service.erl
+++ b/lib/diameter/src/base/diameter_service.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -24,6 +24,9 @@
-module(diameter_service).
-behaviour(gen_server).
+-compile({no_auto_import, [now/0]}).
+-import(diameter_lib, [now/0]).
+
%% towards diameter_service_sup
-export([start_link/1]).
@@ -610,8 +613,9 @@ st(#watchdog{ref = Ref, pid = Pid}, Refs) ->
%% st/3
st(#watchdog{pid = Pid}, Reason, Acc) ->
+ MRef = monitor(process, Pid),
Pid ! {shutdown, self(), Reason},
- [Pid | Acc].
+ [MRef | Acc].
%% ---------------------------------------------------------------------------
%% # call_service/2
@@ -765,8 +769,9 @@ reason(failure) ->
start(Ref, {T, Opts}, S)
when T == connect;
T == listen ->
+ N = proplists:get_value(pool_size, Opts, 1),
try
- {ok, start(Ref, type(T), Opts, S)}
+ {ok, start(Ref, type(T), Opts, N, S)}
catch
?FAILURE(Reason) ->
{error, Reason}
@@ -784,11 +789,16 @@ type(connect = T) -> T.
%% start/4
-start(Ref, Type, Opts, #state{watchdogT = WatchdogT,
- peerT = PeerT,
- options = SvcOpts,
- service_name = SvcName,
- service = Svc0})
+start(Ref, Type, Opts, State) ->
+ start(Ref, Type, Opts, 1, State).
+
+%% start/5
+
+start(Ref, Type, Opts, N, #state{watchdogT = WatchdogT,
+ peerT = PeerT,
+ options = SvcOpts,
+ service_name = SvcName,
+ service = Svc0})
when Type == connect;
Type == accept ->
#diameter_service{applications = Apps}
@@ -796,14 +806,19 @@ start(Ref, Type, Opts, #state{watchdogT = WatchdogT,
= merge_service(Opts, Svc0),
{_,_} = Mask = proplists:get_value(sequence, SvcOpts),
RecvData = diameter_traffic:make_recvdata([SvcName, PeerT, Apps, Mask]),
- Pid = s(Type, Ref, {{spawn_opts([Opts, SvcOpts]), RecvData},
- Opts,
- SvcOpts,
- Svc}),
- insert(WatchdogT, #watchdog{pid = Pid,
- type = Type,
- ref = Ref,
- options = Opts}),
+ T = {{spawn_opts([Opts, SvcOpts]), RecvData}, Opts, SvcOpts, Svc},
+ Rec = #watchdog{type = Type,
+ ref = Ref,
+ options = Opts},
+ diameter_lib:fold_n(fun(_,A) ->
+ [wd(Type, Ref, T, WatchdogT, Rec) | A]
+ end,
+ [],
+ N).
+
+wd(Type, Ref, T, WatchdogT, Rec) ->
+ Pid = wd(Type, Ref, T),
+ insert(WatchdogT, Rec#watchdog{pid = Pid}),
Pid.
%% Note that the service record passed into the watchdog is the merged
@@ -816,7 +831,7 @@ spawn_opts(Optss) ->
T /= link,
T /= monitor].
-s(Type, Ref, T) ->
+wd(Type, Ref, T) ->
{_MRef, Pid} = diameter_watchdog:start({Type, Ref}, T),
Pid.
@@ -1185,7 +1200,7 @@ connect_timer(Opts, Def0) ->
%% continuous restarted in case of faulty config or other problems.
tc(Time, Tc) ->
choose(Tc > ?RESTART_TC
- orelse timer:now_diff(now(), Time) > 1000*?RESTART_TC,
+ orelse diameter_lib:micro_diff(Time) > 1000*?RESTART_TC,
Tc,
?RESTART_TC).
@@ -1718,31 +1733,43 @@ info_transport(S) ->
[],
PeerD).
-%% Only a config entry for a listening transport: use it.
-transport([[{type, listen}, _] = L]) ->
- L ++ [{accept, []}];
-
-%% Only one config or peer entry for a connecting transport: use it.
-transport([[{type, connect} | _] = L]) ->
- L;
+%% Single config entry. Distinguish between whether or not pool_size is
+%% configured on a connecting transport, for backwards compatibility:
+%% with the option the form is similar to the listening case, with
+%% connections grouped in a pool tuple (for lack of a better name);
+%% without it the form is as before.
+transport([[{type, Type}, {options, Opts}] = L])
+ when Type == listen;
+ Type == connect ->
+ L ++ [{K, []} || [{_,K}] <- [keys(Type, Opts)]];
%% Peer entries: discard config. Note that the peer entries have
%% length at least 3.
transport([[_,_] | L]) ->
transport(L);
-%% Possibly many peer entries for a listening transport. Note that all
-%% have the same options by construction, which is not terribly space
-%% efficient.
-transport([[{type, accept}, {options, Opts} | _] | _] = Ls) ->
- [{type, listen},
+%% Multiple transports. Note that all have the same options by
+%% construction, which is not terribly space efficient.
+transport([[{type, Type}, {options, Opts} | _] | _] = Ls) ->
+ transport(keys(Type, Opts), Ls).
+
+%% Group transports in an accept or pool tuple ...
+transport([{Type, Key}], [[{type, _}, {options, Opts} | _] | _] = Ls) ->
+ [{type, Type},
{options, Opts},
- {accept, [lists:nthtail(2,L) || L <- Ls]}].
+ {Key, [tl(tl(L)) || L <- Ls]}];
+
+%% ... or not: there can only be one.
+transport([], [L]) ->
+ L.
+
+keys(connect = T, Opts) ->
+ [{T, pool} || lists:keymember(pool_size, 1, Opts)];
+keys(_, _) ->
+ [{listen, accept}].
peer_dict(#state{watchdogT = WatchdogT, peerT = PeerT}, Dict0) ->
try ets:tab2list(WatchdogT) of
- L ->
- lists:foldl(fun(T,A) -> peer_acc(PeerT, A, T) end, Dict0, L)
+ L -> lists:foldl(fun(T,A) -> peer_acc(PeerT, A, T) end, Dict0, L)
catch
error: badarg -> Dict0 %% service has gone down
end.
diff --git a/lib/diameter/src/base/diameter_service_sup.erl b/lib/diameter/src/base/diameter_service_sup.erl
index 153fff902f..e3177f0083 100644
--- a/lib/diameter/src/base/diameter_service_sup.erl
+++ b/lib/diameter/src/base/diameter_service_sup.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2011. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -58,7 +58,7 @@ init([]) ->
ChildSpec = {Mod,
{Mod, start_link, []},
temporary,
- 1000,
+ 5000,
worker,
[Mod]},
{ok, {Flags, [ChildSpec]}}.
diff --git a/lib/diameter/src/base/diameter_session.erl b/lib/diameter/src/base/diameter_session.erl
index 3b236f109a..c5ea0428b5 100644
--- a/lib/diameter/src/base/diameter_session.erl
+++ b/lib/diameter/src/base/diameter_session.erl
@@ -157,8 +157,8 @@ session_id(Host) ->
%% ---------------------------------------------------------------------------
init() ->
- Now = now(),
- random:seed(Now),
+ {Now, Seed} = diameter_lib:seed(),
+ random:seed(Seed),
Time = time32(Now),
Seq = (?INT32 band (Time bsl 20)) bor (random:uniform(1 bsl 20) - 1),
ets:insert(diameter_sequence, [{origin_state_id, Time},
diff --git a/lib/diameter/src/base/diameter_stats.erl b/lib/diameter/src/base/diameter_stats.erl
index 8353613d32..64ea082be0 100644
--- a/lib/diameter/src/base/diameter_stats.erl
+++ b/lib/diameter/src/base/diameter_stats.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -22,9 +22,11 @@
%%
-module(diameter_stats).
-
-behaviour(gen_server).
+-compile({no_auto_import, [now/0]}).
+-import(diameter_lib, [now/0]).
+
-export([reg/2, reg/1,
incr/3, incr/1,
read/1,
diff --git a/lib/diameter/src/base/diameter_sup.erl b/lib/diameter/src/base/diameter_sup.erl
index e5afd23dcd..4ede4086d8 100644
--- a/lib/diameter/src/base/diameter_sup.erl
+++ b/lib/diameter/src/base/diameter_sup.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2011. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -64,7 +64,7 @@ spec(Mod) ->
{Mod,
{Mod, start_link, []},
permanent,
- 1000,
+ infinity,
supervisor,
[Mod]}.
diff --git a/lib/diameter/src/base/diameter_sync.erl b/lib/diameter/src/base/diameter_sync.erl
index ce2db4b3a2..90eabece3d 100644
--- a/lib/diameter/src/base/diameter_sync.erl
+++ b/lib/diameter/src/base/diameter_sync.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2011. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -27,6 +27,9 @@
-module(diameter_sync).
-behaviour(gen_server).
+-compile({no_auto_import, [now/0]}).
+-import(diameter_lib, [now/0]).
+
-export([call/4, call/5,
cast/4, cast/5,
carp/1, carp/2]).
diff --git a/lib/diameter/src/base/diameter_traffic.erl b/lib/diameter/src/base/diameter_traffic.erl
index 3b62afca47..0b503338a6 100644
--- a/lib/diameter/src/base/diameter_traffic.erl
+++ b/lib/diameter/src/base/diameter_traffic.erl
@@ -162,24 +162,28 @@ incr_error(Dir, Id, TPid) ->
%% incr_rc/4
%% ---------------------------------------------------------------------------
--spec incr_rc(send|recv, Pkt, TPid, Dict0)
+-spec incr_rc(send|recv, Pkt, TPid, DictT)
-> {Counter, non_neg_integer()}
| Reason
when Pkt :: #diameter_packet{},
TPid :: pid(),
- Dict0 :: module(),
+ DictT :: module() | {module(), module(), module()},
Counter :: {'Result-Code', integer()}
| {'Experimental-Result', integer(), integer()},
Reason :: atom().
-incr_rc(Dir, Pkt, TPid, Dict0) ->
+incr_rc(Dir, Pkt, TPid, {Dict, _, _} = DictT) ->
try
- incr_result(Dir, Pkt, TPid, {Dict0, Dict0, Dict0})
+ incr_result(Dir, Pkt, TPid, DictT)
catch
exit: {E,_} when E == no_result_code;
E == invalid_error_bit ->
+ incr(TPid, {msg_id(Pkt#diameter_packet.header, Dict), Dir, E}),
E
- end.
+ end;
+
+incr_rc(Dir, Pkt, TPid, Dict0) ->
+ incr_rc(Dir, Pkt, TPid, {Dict0, Dict0, Dict0}).
%% ---------------------------------------------------------------------------
%% pending/1
@@ -678,7 +682,7 @@ local(Msg, TPid, {Dict, AppDict, Dict0} = DictT, Fs, ReqPkt) ->
reset(make_answer_packet(Msg, ReqPkt), Dict, Dict0),
Fs),
incr(send, Pkt, TPid, AppDict),
- incr_result(send, Pkt, TPid, DictT), %% count outgoing
+ incr_rc(send, Pkt, TPid, DictT), %% count outgoing
send(TPid, Pkt).
%% reset/3
@@ -1388,6 +1392,21 @@ make_request_packet(#diameter_packet{header = Hdr} = Pkt,
make_request_packet(Msg, Pkt) ->
Pkt#diameter_packet{msg = Msg}.
+%% make_retransmit_packet/2
+
+make_retransmit_packet(#diameter_packet{msg = [#diameter_header{} = Hdr
+ | Avps]}
+ = Pkt) ->
+ Pkt#diameter_packet{msg = [make_retransmit_header(Hdr) | Avps]};
+
+make_retransmit_packet(#diameter_packet{header = Hdr} = Pkt) ->
+ Pkt#diameter_packet{header = make_retransmit_header(Hdr)}.
+
+%% make_retransmit_header/1
+
+make_retransmit_header(Hdr) ->
+ Hdr#diameter_header{is_retransmitted = true}.
+
%% fold_record/2
fold_record(undefined, R) ->
@@ -1674,9 +1693,7 @@ retransmit({TPid, Caps, App}
have_request(Pkt0, TPid) %% Don't failover to a peer we've
andalso ?THROW(timeout), %% already sent to.
- #diameter_packet{header = Hdr0} = Pkt0,
- Hdr = Hdr0#diameter_header{is_retransmitted = true},
- Pkt = Pkt0#diameter_packet{header = Hdr},
+ Pkt = make_retransmit_packet(Pkt0),
retransmit(cb(App, prepare_retransmit, [Pkt, SvcName, {TPid, Caps}]),
Transport,
diff --git a/lib/diameter/src/base/diameter_types.erl b/lib/diameter/src/base/diameter_types.erl
index ca3338be5f..442d90c98b 100644
--- a/lib/diameter/src/base/diameter_types.erl
+++ b/lib/diameter/src/base/diameter_types.erl
@@ -75,7 +75,7 @@
%% message indicating this error MUST include the offending AVPs
%% within a Failed-AVP AVP.
%%
--define(INVALID_LENGTH(Bin), erlang:error({'DIAMETER', 5014, Bin})).
+-define(INVALID_LENGTH(Bitstr), erlang:error({'DIAMETER', 5014, Bitstr})).
%% -------------------------------------------------------------------------
%% 3588, 4.2. Basic AVP Data Formats
diff --git a/lib/diameter/src/base/diameter_watchdog.erl b/lib/diameter/src/base/diameter_watchdog.erl
index b7f2d24941..67715906e8 100644
--- a/lib/diameter/src/base/diameter_watchdog.erl
+++ b/lib/diameter/src/base/diameter_watchdog.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -122,7 +122,8 @@ i({Ack, T, Pid, {RecvData,
= Svc}}) ->
erlang:monitor(process, Pid),
wait(Ack, Pid),
- random:seed(now()),
+ {_, Seed} = diameter_lib:seed(),
+ random:seed(Seed),
putr(restart, {T, Opts, Svc}), %% save seeing it in trace
putr(dwr, dwr(Caps)), %%
{_,_} = Mask = proplists:get_value(sequence, SvcOpts),
diff --git a/lib/diameter/src/modules.mk b/lib/diameter/src/modules.mk
index a2a7a51892..c9dd4e683a 100644
--- a/lib/diameter/src/modules.mk
+++ b/lib/diameter/src/modules.mk
@@ -1,7 +1,7 @@
# %CopyrightBegin%
#
-# Copyright Ericsson AB 2010-2014. All Rights Reserved.
+# Copyright Ericsson AB 2010-2015. All Rights Reserved.
#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
@@ -94,7 +94,7 @@ BINS = \
# Released files relative to ../examples.
EXAMPLES = \
code/GNUmakefile \
- code/peer.erl \
+ code/node.erl \
code/client.erl \
code/client_cb.erl \
code/server.erl \
diff --git a/lib/diameter/src/transport/diameter_sctp.erl b/lib/diameter/src/transport/diameter_sctp.erl
index 32e7aaca39..2c8d6f0a14 100644
--- a/lib/diameter/src/transport/diameter_sctp.erl
+++ b/lib/diameter/src/transport/diameter_sctp.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -18,9 +18,11 @@
%%
-module(diameter_sctp).
-
-behaviour(gen_server).
+-compile({no_auto_import, [now/0]}).
+-import(diameter_lib, [now/0]).
+
%% interface
-export([start/3]).
@@ -37,7 +39,8 @@
code_change/3,
terminate/2]).
--export([info/1]). %% service_info callback
+-export([listener/1,%% diameter_sync callback
+ info/1]). %% service_info callback
-export([ports/0,
ports/1]).
@@ -99,22 +102,31 @@
-record(listener,
{ref :: reference(),
socket :: gen_sctp:sctp_socket(),
- count = 0 :: uint(),
+ count = 0 :: uint(), %% attached transport processes
tmap = ets:new(?MODULE, []) :: ets:tid(),
%% {MRef, Pid|AssocId}, {AssocId, Pid}
pending = {0, ets:new(?MODULE, [ordered_set])},
tref :: reference(),
accept :: [match()]}).
%% Field tmap is used to map an incoming message or event to the
-%% relevent transport process. Field pending implements a queue of
-%% transport processes to which an association has been assigned (at
-%% comm_up and written into tmap) but for which diameter hasn't yet
-%% spawned a transport process: a short-lived state of affairs as a
-%% new transport is spawned as a consequence of a peer being taken up,
-%% transport processes being spawned by the listener on demand. In
-%% case diameter starts a transport before comm_up on a new
-%% association, pending is set to an improper list with the spawned
-%% transport as head and the queue as tail.
+%% relevant transport process. Field pending implements two queues:
+%% the first of transport-to-be processes to which an association has
+%% been assigned (at comm_up and written into tmap) but for which
+%% diameter hasn't yet spawned a transport process, a short-lived
+%% state of affairs as a new transport is spawned as a consequence of
+%% a peer being taken up, transport processes being spawned by the
+%% listener on demand; the second of started transport processes that
+%% have not yet been assigned an association.
+%%
+%% When diameter calls start/3, the transport process is either taken
+%% from the first queue or spawned and placed in the second queue
+%% until an association is established. When an association is
+%% established, a controlling process is either taken from the second
+%% queue or spawned and placed in the first queue. Thus, there are
+%% only elements in one queue at a time, so both share a single ets
+%% table, tagged with a positive length if it contains the first queue, a
+%% negative length if it contains the second queue. The case -1 is
+%% handled differently for backwards compatibility reasons.
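To make the tagging concrete, an illustrative reading of the scheme described in the comment above:

    %% {2, Q}   two processes hold an association, awaiting a start/3 request
    %% {-2, Q}  two started transports (e.g. pool_size 2) await an association
    %% {0, Q}   both queues are empty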
%% ---------------------------------------------------------------------------
%% # start/3
@@ -139,9 +151,9 @@ ip(T) ->
T.
%% A listener spawns transports either as a consequence of this call
-%% when there is not yet an association to associate with it, or at
-%% comm_up on a new association in which case the call retrieves a
-%% transport from the pending queue.
+%% when there is not yet an association to assign it, or at comm_up on
+%% a new association in which case the call retrieves a transport from
+%% the pending queue.
s({accept, Ref} = A, Addrs, Opts) ->
{LPid, LAs} = listener(Ref, {Opts, Addrs}),
try gen_server:call(LPid, {A, self()}, infinity) of
@@ -226,7 +238,7 @@ i({connect, Pid, Opts, Addrs, Ref}) ->
{LAs, Sock} = open(Addrs, Rest, 0),
putr(?REF_KEY, Ref),
proc_lib:init_ack({ok, self(), LAs}),
- erlang:monitor(process, Pid),
+ monitor(process, Pid),
#transport{parent = Pid,
mode = {connect, connect(Sock, RAs, RP, [])},
socket = Sock};
@@ -236,8 +248,8 @@ i({accept, Pid, LPid, Sock, Ref})
when is_pid(Pid) ->
putr(?REF_KEY, Ref),
proc_lib:init_ack({ok, self()}),
- erlang:monitor(process, Pid),
- erlang:monitor(process, LPid),
+ monitor(process, Pid),
+ monitor(process, LPid),
#transport{parent = Pid,
mode = {accept, LPid},
socket = Sock};
@@ -246,7 +258,7 @@ i({accept, Pid, LPid, Sock, Ref})
i({accept, Ref, LPid, Sock, Id}) ->
putr(?REF_KEY, Ref),
proc_lib:init_ack({ok, self()}),
- MRef = erlang:monitor(process, LPid),
+ MRef = monitor(process, LPid),
%% Wait for a signal that the transport has been started before
%% processing other messages.
receive
@@ -270,15 +282,23 @@ close(Sock, Id) ->
%% listener/2
+%% Accepting processes can be started concurrently: ensure only one
+%% listener is started.
listener(LRef, T) ->
+ diameter_sync:call({?MODULE, listener, LRef},
+ {?MODULE, listener, [{LRef, T}]},
+ infinity,
+ infinity).
+
+listener({LRef, T}) ->
l(diameter_reg:match({?MODULE, listener, {LRef, '_'}}), LRef, T).
-%% Existing process with the listening socket ...
+%% Existing listening process ...
l([{{?MODULE, listener, {_, AS}}, LPid}], _, _) ->
- {LAs, _Sock} = AS,
- {LPid, LAs};
-
-%% ... or not: start one.
+ {LAs, _Sock} = AS,
+ {LPid, LAs};
+
+%% ... or not.
l([], LRef, T) ->
{ok, LPid, LAs} = diameter_sctp_sup:start_child({listen, LRef, T}),
{LPid, LAs}.
@@ -347,11 +367,17 @@ type(T) ->
%% # handle_call/3
%% ---------------------------------------------------------------------------
+handle_call(T, From, #listener{pending = L} = S)
+ when is_list(L) ->
+ handle_call(T, From, upgrade(S));
+
handle_call({{accept, Ref}, Pid}, _, #listener{ref = Ref,
- count = N}
+ pending = {N,Q},
+ count = K}
= S) ->
- {TPid, NewS} = accept(Ref, Pid, S),
- {reply, {ok, TPid}, NewS#listener{count = N+1}};
+ TPid = accept(Ref, Pid, S),
+ {reply, {ok, TPid}, downgrade(S#listener{pending = {N-1,Q},
+ count = K+1})};
handle_call(_, _, State) ->
{reply, nok, State}.
@@ -370,8 +396,46 @@ handle_cast(_, State) ->
handle_info(T, #transport{} = S) ->
{noreply, #transport{} = t(T,S)};
+handle_info(T, #listener{pending = L} = S)
+ when is_list(L) ->
+ handle_info(T, upgrade(S));
+
handle_info(T, #listener{} = S) ->
- {noreply, #listener{} = l(T,S)}.
+ {noreply, downgrade(#listener{} = l(T,S))}.
+
+%% upgrade/1
+
+upgrade(#listener{pending = [TPid | {0,Q}]} = S) ->
+ ets:insert(Q, {TPid, now()}),
+ S#listener{pending = {-1,Q}}.
+%% Prior to the possibility of setting pool_size in transport
+%% configuration, a new accepting transport was only started following
+%% the death of a predecessor, so that there was only at most one
+%% previously started transport process waiting for an association.
+%% This assumption no longer holds with pool_size > 1, in which case
+%% several accepting transports are started concurrently. Deal with
+%% this by placing the started transports in a new queue of transport
+%% processes waiting for an association.
+%%
+%% Since only one of this queue and the existing queue of controlling
+%% processes waiting for a transport to be started can be non-empty at
+%% any given time, implement both queues in the same ets table. The
+%% absolute value of the first element of the 2-tuple is the queue
+%% length, the sign says which queue it is.
+
+%% downgrade/1
+%%
+%% Revert to the pre-pool_size representation when possible, for
+%% backwards compatibility in the case that the pool_size option
+%% hasn't been used.
+
+downgrade(#listener{pending = {-1,Q}} = S) ->
+ TPid = ets:first(Q),
+ ets:delete(Q, TPid),
+ S#listener{pending = [TPid | {0,Q}]};
+
+downgrade(S) ->
+ S.
%% ---------------------------------------------------------------------------
%% # code_change/3
@@ -436,54 +500,46 @@ l({sctp, Sock, _RA, _RP, Data} = Msg, #listener{socket = Sock} = S) ->
setopts(Sock)
end;
-%% Transport is asking message to be sent. See send/3 for why the send
-%% isn't directly from the transport.
-l({send, AssocId, StreamId, Bin}, #listener{socket = Sock} = S) ->
- send(Sock, AssocId, StreamId, Bin),
- S;
+l({'DOWN', MRef, process, TPid, _}, #listener{pending = {_,Q}} = S) ->
+ down(ets:member(Q, TPid), MRef, TPid, S);
+
+%% Timeout after the last accepting process has died.
+l({timeout, TRef, close = T}, #listener{tref = TRef,
+ count = 0}) ->
+ x(T);
+l({timeout, _, close}, #listener{} = S) ->
+ S.
+
+%% down/4
%% Accepting transport has died. One that's awaiting an association ...
-l({'DOWN', MRef, process, TPid, _}, #listener{pending = [TPid | Q],
- tmap = T,
- count = N}
- = S) ->
+down(true, MRef, TPid, #listener{pending = {N,Q},
+ tmap = T,
+ count = K}
+ = S)
+ when N < 0 ->
+ ets:delete(Q, TPid),
ets:delete(T, MRef),
ets:delete(T, TPid),
- start_timer(S#listener{count = N-1,
- pending = Q});
-
-%% ... ditto and a new transport has already been started ...
-l({'DOWN', _, process, _, _} = T, #listener{pending = [TPid | Q]}
- = S) ->
- #listener{pending = NQ}
- = NewS
- = l(T, S#listener{pending = Q}),
- NewS#listener{pending = [TPid | NQ]};
-
-%% ... or not.
-l({'DOWN', MRef, process, TPid, _}, #listener{socket = Sock,
- tmap = T,
- count = N,
- pending = {P,Q}}
- = S) ->
+ start_timer(S#listener{count = K-1,
+ pending = {N+1,Q}});
+
+%% ... or one that already has one.
+down(B, MRef, TPid, #listener{socket = Sock,
+ tmap = T,
+ count = K,
+ pending = {N,Q}}
+ = S) ->
[{MRef, Id}] = ets:lookup(T, MRef), %% Id = TPid | AssocId
ets:delete(T, MRef),
ets:delete(T, Id),
Id == TPid orelse close(Sock, Id),
- case ets:lookup(Q, TPid) of
- [{TPid, _}] -> %% transport in the pending queue ...
+ if B -> %% Waiting for attachment in the pending queue ...
ets:delete(Q, TPid),
- S#listener{pending = {P-1, Q}};
- [] -> %% ... or not
- start_timer(S#listener{count = N-1})
- end;
-
-%% Timeout after the last accepting process has died.
-l({timeout, TRef, close = T}, #listener{tref = TRef,
- count = 0}) ->
- x(T);
-l({timeout, _, close}, #listener{} = S) ->
- S.
+ S#listener{pending = {N-1,Q}};
+ true -> %% ... or already attached
+ start_timer(S#listener{count = K-1})
+ end.
%% t/2
%%
@@ -582,29 +638,24 @@ accept(Opts) ->
%% No pending associations: spawn a new transport.
accept(Ref, Pid, #listener{socket = Sock,
tmap = T,
- pending = {0,_} = Q}
- = S) ->
+ pending = {N,Q}})
+ when N =< 0 ->
Arg = {accept, Pid, self(), Sock, Ref},
{ok, TPid} = diameter_sctp_sup:start_child(Arg),
- MRef = erlang:monitor(process, TPid),
+ MRef = monitor(process, TPid),
ets:insert(T, [{MRef, TPid}, {TPid, MRef}]),
- {TPid, S#listener{pending = [TPid | Q]}};
-%% Placing the transport in the pending field makes it available to
-%% the next association. The stack starts a new accepting transport
-%% only after this one brings the connection up (or dies).
-
-%% Accepting transport has died. This can happen if a new transport is
-%% started before the DOWN has arrived.
-accept(Ref, Pid, #listener{pending = [TPid | {0,_} = Q]} = S) ->
- false = is_process_alive(TPid), %% assert
- accept(Ref, Pid, S#listener{pending = Q});
+ ets:insert(Q, {TPid, now()}),
+ TPid;
+%% Placing the transport in the second pending table makes it
+%% available to the next association.
%% Pending associations: attach to the first in the queue.
-accept(_, Pid, #listener{ref = Ref, pending = {N,Q}} = S) ->
+accept(_, Pid, #listener{ref = Ref,
+ pending = {_,Q}}) ->
TPid = ets:first(Q),
TPid ! {Ref, Pid},
ets:delete(Q, TPid),
- {TPid, S#listener{pending = {N-1, Q}}}.
+ TPid.
%% send/2
@@ -718,34 +769,12 @@ up(#transport{parent = Pid,
find(Id, Data, #listener{tmap = T} = S) ->
f(ets:lookup(T, Id), Data, S).
-%% New association and a transport waiting for one: use it.
-f([],
- {_, #sctp_assoc_change{state = comm_up,
- assoc_id = Id}},
- #listener{tmap = T,
- pending = [TPid | {_,_} = Q]}
- = S) ->
- [{TPid, MRef}] = ets:lookup(T, TPid),
- ets:insert(T, [{MRef, Id}, {Id, TPid}]),
- ets:delete(T, TPid),
- {TPid, S#listener{pending = Q}};
-
-%% New association and no transport start yet: spawn one and place it
-%% in the queue.
+%% New association ...
f([],
- {_, #sctp_assoc_change{state = comm_up,
- assoc_id = Id}},
- #listener{ref = Ref,
- socket = Sock,
- tmap = T,
- pending = {N,Q}}
+ {_, #sctp_assoc_change{state = comm_up, assoc_id = Id}},
+ #listener{pending = {N,Q}}
= S) ->
- Arg = {accept, Ref, self(), Sock, Id},
- {ok, TPid} = diameter_sctp_sup:start_child(Arg),
- MRef = erlang:monitor(process, TPid),
- ets:insert(T, [{MRef, Id}, {Id, TPid}]),
- ets:insert(Q, {TPid, now()}),
- {TPid, S#listener{pending = {N+1, Q}}};
+ {find(Id, S), S#listener{pending = {N+1,Q}}};
%% Known association ...
f([{_, TPid}], _, S) ->
@@ -755,6 +784,31 @@ f([{_, TPid}], _, S) ->
f([], _, _) ->
false.
+%% find/2
+
+%% Transport waiting for an association: use it.
+find(Id, #listener{tmap = T,
+ pending = {N,Q}})
+ when N < 0 ->
+ TPid = ets:first(Q),
+ [{TPid, MRef}] = ets:lookup(T, TPid),
+ ets:insert(T, [{MRef, Id}, {Id, TPid}]),
+ ets:delete(T, TPid),
+ ets:delete(Q, TPid),
+ TPid;
+
+%% No transport start yet: spawn one and queue.
+find(Id, #listener{ref = Ref,
+ socket = Sock,
+ tmap = T,
+ pending = {_,Q}}) ->
+ Arg = {accept, Ref, self(), Sock, Id},
+ {ok, TPid} = diameter_sctp_sup:start_child(Arg),
+ MRef = monitor(process, TPid),
+ ets:insert(T, [{MRef, Id}, {Id, TPid}]),
+ ets:insert(Q, {TPid, now()}),
+ TPid.
+
%% assoc_id/1
assoc_id({[#sctp_sndrcvinfo{assoc_id = Id}], _}) ->
diff --git a/lib/diameter/src/transport/diameter_tcp.erl b/lib/diameter/src/transport/diameter_tcp.erl
index 4d1b8bec51..0b26f429fb 100644
--- a/lib/diameter/src/transport/diameter_tcp.erl
+++ b/lib/diameter/src/transport/diameter_tcp.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -37,7 +37,8 @@
code_change/3,
terminate/2]).
--export([info/1]). %% service_info callback
+-export([listener/1,%% diameter_sync callback
+ info/1]). %% service_info callback
-export([ports/0,
ports/1]).
@@ -191,7 +192,7 @@ init(T) ->
i({T, Ref, Mod, Pid, Opts, Addrs})
when T == accept;
T == connect ->
- erlang:monitor(process, Pid),
+ monitor(process, Pid),
%% Since accept/connect might block indefinitely, spawn a process
%% that does nothing but kill us with the parent until call
%% returns.
@@ -218,8 +219,8 @@ i({T, Ref, Mod, Pid, Opts, Addrs})
%% A monitor process to kill the transport if the parent dies.
i(#monitor{parent = Pid, transport = TPid} = S) ->
proc_lib:init_ack({ok, self()}),
- erlang:monitor(process, Pid),
- erlang:monitor(process, TPid),
+ monitor(process, Pid),
+ monitor(process, TPid),
S;
%% In principle a link between the transport and killer processes
%% could do the same thing: have the accepting/connecting process be
@@ -235,7 +236,7 @@ i({listen, LRef, APid, {Mod, Opts, Addrs}}) ->
LAddr = laddr(LAddrOpt, Mod, LSock),
true = diameter_reg:add_new({?MODULE, listener, {LRef, {LAddr, LSock}}}),
proc_lib:init_ack({ok, self(), {LAddr, LSock}}),
- erlang:monitor(process, APid),
+ monitor(process, APid),
start_timer(#listener{socket = LSock}).
laddr([], Mod, Sock) ->
@@ -336,17 +337,25 @@ accept(Opts) ->
%% listener/2
+%% Accepting processes can be started concurrently: ensure only one
+%% listener is started.
listener(LRef, T) ->
- l(diameter_reg:match({?MODULE, listener, {LRef, '_'}}), LRef, T).
+ diameter_sync:call({?MODULE, listener, LRef},
+ {?MODULE, listener, [{LRef, T, self()}]},
+ infinity,
+ infinity).
-%% Existing process with the listening socket ...
-l([{{?MODULE, listener, {_, AS}}, LPid}], _, _) ->
- LPid ! {accept, self()},
+listener({LRef, T, TPid}) ->
+ l(diameter_reg:match({?MODULE, listener, {LRef, '_'}}), LRef, T, TPid).
+
+%% Existing listening process ...
+l([{{?MODULE, listener, {_, AS}}, LPid}], _, _, TPid) ->
+ LPid ! {accept, TPid},
AS;
-%% ... or not: start one.
-l([], LRef, T) ->
- {ok, _, AS} = diameter_tcp_sup:start_child({listen, LRef, self(), T}),
+%% ... or not.
+l([], LRef, T, TPid) ->
+ {ok, _, AS} = diameter_tcp_sup:start_child({listen, LRef, TPid, T}),
AS.
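Routing listener startup through diameter_sync serializes concurrent accept attempts on the same LRef: calls queued on the name {?MODULE, listener, LRef} execute one at a time, so the diameter_reg:match/1 check and the subsequent start_child cannot race and start two listeners. A hedged sketch of the same pattern for an arbitrary start function (ensure_started/2 and Mod:start_if_needed/1 are invented names for the example):

    %% Only one caller at a time runs the MFA queued on Name, making
    %% "check the registry, then maybe start" atomic across callers.
    ensure_started(Mod, Key) ->
        diameter_sync:call({Mod, start, Key},             %% queue name
                           {Mod, start_if_needed, [Key]}, %% run in turn
                           infinity,                      %% max time queued
                           infinity).                     %% max time executing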
%% get_addr/1
@@ -502,7 +511,7 @@ m({'DOWN', _, process, Pid, _}, #monitor{parent = Pid,
%% Another accept transport is attaching.
l({accept, TPid}, #listener{count = N} = S) ->
- erlang:monitor(process, TPid),
+ monitor(process, TPid),
S#listener{count = N+1};
%% Accepting process has died.
diff --git a/lib/diameter/src/transport/diameter_transport_sup.erl b/lib/diameter/src/transport/diameter_transport_sup.erl
index 6457ab78b0..284a41a752 100644
--- a/lib/diameter/src/transport/diameter_transport_sup.erl
+++ b/lib/diameter/src/transport/diameter_transport_sup.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2011. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -54,7 +54,7 @@ start_child(Name, Module) ->
Spec = {Name,
{Module, start_link, [Name]},
permanent,
- 1000,
+ infinity,
supervisor,
[Module]},
supervisor:start_child(?MODULE, Spec).
diff --git a/lib/diameter/test/diameter_app_SUITE.erl b/lib/diameter/test/diameter_app_SUITE.erl
index f68a18b5c2..cf34c762e1 100644
--- a/lib/diameter/test/diameter_app_SUITE.erl
+++ b/lib/diameter/test/diameter_app_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -187,15 +187,14 @@ xref(Config) ->
xref:stop(XRef),
+ Rel = release(), %% otp_release-ish
+
%% Only care about calls from our own application.
- [] = lists:filter(fun({{F,_,_},{T,_,_}}) ->
+ [] = lists:filter(fun({{F,_,_} = From, {_,_,_} = To}) ->
lists:member(F, Mods)
- andalso {F,T} /= {diameter_tcp, ssl}
+ andalso not ignored(From, To, Rel)
end,
Undefs),
- %% diameter_tcp does call ssl despite the latter not being listed
- %% as a dependency in the app file since ssl is only required for
- %% TLS security: it's up to a client who wants TLS to start ssl.
%% Ensure that only runtime or info modules call runtime modules.
%% It's not strictly necessary that diameter compiler modules not
@@ -214,6 +213,38 @@ xref(Config) ->
[] = lists:filter(fun(M) -> not lists:member(app(M), Deps) end,
RTdeps -- Mods).
+ignored({FromMod,_,_}, {ToMod,_,_} = To, Rel)->
+ %% diameter_tcp does call ssl despite the latter not being listed
+ %% as a dependency in the app file since ssl is only required for
+ %% TLS security: it's up to a client who wants TLS to start ssl.
+ %% The OTP 18 time api is also called if it exists, so that the
+ %% same code can be run on older releases.
+ {FromMod, ToMod} == {diameter_tcp, ssl}
+ orelse (FromMod == diameter_lib
+ andalso Rel < 18
+ andalso lists:member(To, time_api())).
+
+%% New time api in OTP 18.
+time_api() ->
+ [{erlang, F, A} || {F,A} <- [{convert_time_resolution,3},
+ {monotonic_time,0},
+ {monotonic_time,1},
+ {time_offset,0},
+ {time_offset,1},
+ {time_resolution,0},
+ {timestamp,0},
+ {unique_integer,0},
+ {unique_integer,1}]].
+
+release() ->
+ Rel = erlang:system_info(otp_release),
+ try list_to_integer(Rel) of
+ N -> N
+ catch
+ error:_ ->
+ 0 %% aka < 17
+ end.
+
unversion(App) ->
T = lists:dropwhile(fun is_vsn_ch/1, lists:reverse(App)),
lists:reverse(case T of [$-|TT] -> TT; _ -> T end).
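The ignored/3 and time_api/0 additions account for diameter_lib calling the OTP 18 time functions only when they exist, so the same beam runs on older emulators while xref still reports the calls as undefined there. A minimal sketch of that call pattern, using only functions known to exist in the respective releases (the function name is invented):

    %% Prefer the OTP 18 monotonic clock; fall back where
    %% erlang:monotonic_time/0 is not yet available.
    timestamp() ->
        try
            erlang:monotonic_time()
        catch
            error:undef ->
                erlang:now()   %% pre-18 fallback
        end.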
diff --git a/lib/diameter/test/diameter_capx_SUITE.erl b/lib/diameter/test/diameter_capx_SUITE.erl
index deabdd720b..02501ce779 100644
--- a/lib/diameter/test/diameter_capx_SUITE.erl
+++ b/lib/diameter/test/diameter_capx_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -144,8 +144,8 @@ end_per_suite(_Config) ->
%% Generate a unique hostname for each testcase so that watchdogs
%% don't prevent a connection from being brought up immediately.
init_per_testcase(Name, Config) ->
- Uniq = ["." ++ integer_to_list(N) || N <- tuple_to_list(now())],
- [{host, lists:flatten([?L(Name) | Uniq])} | Config].
+ [{host, ?L(Name) ++ "." ++ diameter_util:unique_string()}
+ | Config].
init_per_group(Name, Config) ->
[{rfc, Name} | Config].
diff --git a/lib/diameter/test/diameter_codec_SUITE.erl b/lib/diameter/test/diameter_codec_SUITE.erl
index cd8ca41f66..64ea90554d 100644
--- a/lib/diameter/test/diameter_codec_SUITE.erl
+++ b/lib/diameter/test/diameter_codec_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -29,6 +29,9 @@
-export([suite/0,
all/0,
+ groups/0,
+ init_per_group/2,
+ end_per_group/2,
init_per_testcase/2,
end_per_testcase/2]).
@@ -36,9 +39,13 @@
-export([base/1,
gen/1,
lib/1,
- unknown/1]).
+ unknown/1,
+ success/1,
+ grouped_error/1,
+ failed_error/1]).
-include("diameter_ct.hrl").
+-include("diameter.hrl").
-define(L, atom_to_list).
@@ -48,7 +55,19 @@ suite() ->
[{timetrap, {seconds, 10}}].
all() ->
- [base, gen, lib, unknown].
+ [base, gen, lib, unknown, {group, recode}].
+
+groups() ->
+ [{recode, [], [success,
+ grouped_error,
+ failed_error]}].
+
+init_per_group(recode, Config) ->
+ ok = diameter:start(),
+ Config.
+
+end_per_group(_, _) ->
+ ok = diameter:stop().
init_per_testcase(gen, Config) ->
[{application, ?APP, App}] = diameter_util:consult(?APP, app),
@@ -98,3 +117,166 @@ compile(File) ->
compile(File, Opts) ->
compile:file(File, [return | Opts]).
+
+%% ===========================================================================
+
+%% Ensure a Grouped AVP is represented by a list in the avps field.
+success(_) ->
+ Avps = [{295, <<1:32>>}, %% Termination-Cause
+ {284, [{280, "Proxy-Host"}, %% Proxy-Info
+ {33, "Proxy-State"}, %%
+ {295, <<2:32>>}]}], %% Termination-Cause
+ #diameter_packet{avps = [#diameter_avp{code = 295,
+ value = 1,
+ data = <<1:32>>},
+ [#diameter_avp{code = 284},
+ #diameter_avp{code = 280},
+ #diameter_avp{code = 33},
+ #diameter_avp{code = 295,
+ value = 2,
+ data = <<2:32>>}]],
+ errors = []}
+ = str(recode(str(Avps))).
+
+%% ===========================================================================
+
+%% Ensure a Grouped AVP is represented by a list in the avps field
+%% even in the case of a decode error on a component AVP.
+grouped_error(_) ->
+ Avps = [{295, <<1:32>>}, %% Termination-Cause
+ {284, [{295, <<0:32>>}, %% Proxy-Info, Termination-Cause
+ {280, "Proxy-Host"},
+ {33, "Proxy-State"}]}],
+ #diameter_packet{avps = [#diameter_avp{code = 295,
+ value = 1,
+ data = <<1:32>>},
+ [#diameter_avp{code = 284},
+ #diameter_avp{code = 295,
+ value = undefined,
+ data = <<0:32>>},
+ #diameter_avp{code = 280},
+ #diameter_avp{code = 33}]],
+ errors = [{5004, #diameter_avp{code = 284}}]}
+ = str(recode(str(Avps))).
+
+%% ===========================================================================
+
+%% Ensure that a failed decode in Failed-AVP is acceptable, and that
+%% the component AVPs are decoded if possible.
+failed_error(_) ->
+ Avps = [{279, [{295, <<0:32>>}, %% Failed-AVP, Termination-Cause
+ {258, <<1:32>>}, %% Auth-Application-Id
+ {284, [{280, "Proxy-Host"}, %% Proxy-Info
+ {33, "Proxy-State"},
+ {295, <<0:32>>}, %% Termination-Cause, invalid
+ {258, <<2:32>>}]}]}], %% Auth-Application-Id
+ #diameter_packet{avps = [[#diameter_avp{code = 279},
+ #diameter_avp{code = 295,
+ value = undefined,
+ data = <<0:32>>},
+ #diameter_avp{code = 258,
+ value = 1,
+ data = <<1:32>>},
+ [#diameter_avp{code = 284},
+ #diameter_avp{code = 280},
+ #diameter_avp{code = 33},
+ #diameter_avp{code = 295,
+ value = undefined},
+ #diameter_avp{code = 258,
+ value = 2,
+ data = <<2:32>>}]]],
+ errors = []}
+ = sta(recode(sta(Avps))).
+
+%% ===========================================================================
+
+%% str/1
+
+str(#diameter_packet{avps = [#diameter_avp{code = 263},
+ #diameter_avp{code = 264},
+ #diameter_avp{code = 296},
+ #diameter_avp{code = 283},
+ #diameter_avp{code = 258,
+ value = 0}
+ | T]}
+ = Pkt) ->
+ Pkt#diameter_packet{avps = T};
+
+str(Avps) ->
+ OH = "diameter.erlang.org",
+ OR = "erlang.org",
+ DR = "example.com",
+ Sid = "diameter.erlang.org;123;456",
+
+ [#diameter_header{version = 1,
+ cmd_code = 275, %% STR
+ is_request = true,
+ application_id = 0,
+ hop_by_hop_id = 17,
+ end_to_end_id = 42,
+ is_proxiable = false,
+ is_error = false,
+ is_retransmitted = false}
+ | avp([{263, Sid}, %% Session-Id
+ {264, OH}, %% Origin-Host
+ {296, OR}, %% Origin-Realm
+ {283, DR}, %% Destination-Realm
+ {258, <<0:32>>}] %% Auth-Application-Id
+ ++ Avps)].
+
+%% sta/1
+
+sta(#diameter_packet{avps = [#diameter_avp{code = 263},
+ #diameter_avp{code = 268},
+ #diameter_avp{code = 264},
+ #diameter_avp{code = 296},
+ #diameter_avp{code = 278,
+ value = 4}
+ | T]}
+ = Pkt) ->
+ Pkt#diameter_packet{avps = T};
+
+sta(Avps) ->
+ OH = "diameter.erlang.org",
+ OR = "erlang.org",
+ Sid = "diameter.erlang.org;123;456",
+
+ [#diameter_header{version = 1,
+ cmd_code = 275, %% STA
+ is_request = false,
+ application_id = 0,
+ hop_by_hop_id = 17,
+ end_to_end_id = 42,
+ is_proxiable = false,
+ is_error = false,
+ is_retransmitted = false}
+ | avp([{263, Sid}, %% Session-Id
+ {268, <<2002:32>>}, %% Result-Code
+ {264, OH}, %% Origin-Host
+ {296, OR}, %% Origin-Realm
+ {278, <<4:32>>}] %% Origin-State-Id
+ ++ Avps)].
+
+avp({Code, Data}) ->
+ #diameter_avp{code = Code,
+ data = avp(Data)};
+
+avp(#diameter_avp{} = A) ->
+ A;
+
+avp([{_,_} | _] = Avps) ->
+ lists:map(fun avp/1, Avps);
+
+avp(V) ->
+ V.
+
+%% recode/1
+
+recode(Msg) ->
+ recode(Msg, diameter_gen_base_rfc6733).
+
+recode(#diameter_packet{} = Pkt, Dict) ->
+ diameter_codec:decode(Dict, diameter_codec:encode(Dict, Pkt));
+
+recode(Msg, Dict) ->
+ recode(#diameter_packet{msg = Msg}, Dict).
diff --git a/lib/diameter/test/diameter_codec_test.erl b/lib/diameter/test/diameter_codec_test.erl
index 90536dcf2b..472755c62a 100644
--- a/lib/diameter/test/diameter_codec_test.erl
+++ b/lib/diameter/test/diameter_codec_test.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -229,8 +229,7 @@ v(Max, Ord, E)
when Ord =< Max ->
diameter_enum:to_list(E);
v(Max, Ord, E) ->
- {M,S,U} = now(),
- random:seed(M,S,U),
+ random:seed(diameter_util:seed()),
v(Max, Ord, E, []).
v(0, _, _, Acc) ->
@@ -512,7 +511,7 @@ random(Mn,Mx) ->
seed(undefined) ->
put({?MODULE, seed}, true),
- random:seed(now());
+ random:seed(diameter_util:seed());
seed(true) ->
ok.
diff --git a/lib/diameter/test/diameter_ct.erl b/lib/diameter/test/diameter_ct.erl
index ed2f884681..85c502ea7f 100644
--- a/lib/diameter/test/diameter_ct.erl
+++ b/lib/diameter/test/diameter_ct.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -43,7 +43,7 @@ ct_run(Opts) ->
info(Start , info()).
info() ->
- [{time, now()},
+ [{time, diameter_lib:now()},
{process_count, erlang:system_info(process_count)}
| erlang:memory()].
@@ -56,6 +56,6 @@ info(L0, L1) ->
io:format("INFO: ~p~n", [Diff]).
diff(time, T0, T1) ->
- timer:now_diff(T1, T0);
+ diameter_lib:micro_diff(T1, T0);
diff(_, N0, N1) ->
N1 - N0.
diff --git a/lib/diameter/test/diameter_event_SUITE.erl b/lib/diameter/test/diameter_event_SUITE.erl
index f43f111d20..bfe160203c 100644
--- a/lib/diameter/test/diameter_event_SUITE.erl
+++ b/lib/diameter/test/diameter_event_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2013. All Rights Reserved.
+%% Copyright Ericsson AB 2013-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -168,16 +168,15 @@ connect(Config, Opts) ->
{Name, Ref}.
uniq() ->
- {MS,S,US} = now(),
- lists:flatten(io_lib:format("-~p-~p-~p-", [MS,S,US])).
+ "-" ++ diameter_util:unique_string().
event(Name) ->
receive #diameter_event{service = Name, info = T} -> T end.
event(Name, TL, TH) ->
- T0 = now(),
+ T0 = diameter_lib:now(),
Event = event(Name),
- DT = timer:now_diff(now(), T0) div 1000,
+ DT = diameter_lib:micro_diff(T0) div 1000,
{true, true, DT, Event} = {TL < DT, DT < TH, DT, Event},
Event.
diff --git a/lib/diameter/test/diameter_examples_SUITE.erl b/lib/diameter/test/diameter_examples_SUITE.erl
index aef4bc35ef..ef8e459175 100644
--- a/lib/diameter/test/diameter_examples_SUITE.erl
+++ b/lib/diameter/test/diameter_examples_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2013-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2013-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -295,15 +295,15 @@ slave() ->
[{timetrap, {minutes, 10}}].
slave(_) ->
- T0 = now(),
+ T0 = diameter_lib:now(),
{ok, Node} = ct_slave:start(?MODULE, ?TIMEOUTS),
- T1 = now(),
+ T1 = diameter_lib:now(),
T2 = rpc:call(Node, erlang, now, []),
{ok, Node} = ct_slave:stop(?MODULE),
- now_diff([T0, T1, T2, now()]).
+ now_diff([T0, T1, T2, diameter_lib:now()]).
now_diff([T1,T2|_] = Ts) ->
- [timer:now_diff(T2,T1) | now_diff(tl(Ts))];
+ [diameter_lib:micro_diff(T2,T1) | now_diff(tl(Ts))];
now_diff(_) ->
[].
@@ -397,4 +397,4 @@ stop(Name)
stop(Config) ->
Prot = proplists:get_value(group, Config),
- [] = [RC || N <- ?NODES, RC <- [stop(concat(Prot, N))], RC /= ok].
+ [] = [RC || N <- ?NODES, RC <- [catch stop(concat(Prot, N))], RC /= ok].
diff --git a/lib/diameter/test/diameter_gen_sctp_SUITE.erl b/lib/diameter/test/diameter_gen_sctp_SUITE.erl
index 51ccb1e6ec..4ea5e80095 100644
--- a/lib/diameter/test/diameter_gen_sctp_SUITE.erl
+++ b/lib/diameter/test/diameter_gen_sctp_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -119,10 +119,10 @@ send_not_from_controlling_process(_) ->
send_not_from_controlling_process() ->
FPid = self(),
- {L, MRef} = spawn_monitor(fun() -> listen(FPid) end),%% listening process
+ {L, MRef} = spawn_monitor(fun() -> listen(FPid) end),
receive
{?MODULE, C, S} ->
- erlang:demonitor(MRef, [flush]),
+ demonitor(MRef, [flush]),
[L,C,S];
{'DOWN', MRef, process, _, _} = T ->
error(T)
@@ -137,13 +137,7 @@ listen(FPid) ->
LPid = self(),
spawn(fun() -> connect1(PortNr, FPid, LPid) end), %% connecting process
Id = assoc(Sock),
- ?SCTP(Sock, {[#sctp_sndrcvinfo{assoc_id = Id}], _Bin})
- = recv(). %% Waits with this as current_function.
-
-%% recv/0
-
-recv() ->
- receive T -> T end.
+ recv(Sock, Id).
%% connect1/3
@@ -154,7 +148,7 @@ connect1(PortNr, FPid, LPid) ->
FPid ! {?MODULE,
self(),
spawn(fun() -> send(Sock, Id) end)}, %% sending process
- MRef = erlang:monitor(process, LPid),
+ MRef = monitor(process, LPid),
down(MRef). %% Waits with this as current_function.
%% down/1
@@ -277,7 +271,8 @@ acc(N, Acc) ->
loop(Sock, MRef, Bin) ->
receive
- ?SCTP(Sock, {[#sctp_sndrcvinfo{assoc_id = Id}], B}) ->
+ ?SCTP(Sock, {[#sctp_sndrcvinfo{assoc_id = Id}], B})
+ when is_binary(B) ->
Sz = size(Bin),
{Sz, Bin} = {size(B), B}, %% assert
ok = send(Sock, Id, mark(Bin)),
@@ -291,7 +286,7 @@ loop(Sock, MRef, Bin) ->
%% connect2/3
connect2(Pid, PortNr, Bin) ->
- erlang:monitor(process, Pid),
+ monitor(process, Pid),
{ok, Sock} = open(),
ok = gen_sctp:connect_init(Sock, ?ADDR, PortNr, []),
@@ -301,19 +296,25 @@ connect2(Pid, PortNr, Bin) ->
%% T2 = time after listening process received our message
%% T3 = time after reply is received
- T1 = now(),
+ T1 = diameter_util:timestamp(),
ok = send(Sock, Id, Bin),
T2 = unmark(recv(Sock, Id)),
- T3 = now(),
- {timer:now_diff(T2, T1), timer:now_diff(T3, T2)}. %% {Outbound, Inbound}
+ T3 = diameter_util:timestamp(),
+ {diameter_lib:micro_diff(T2, T1), %% Outbound
+ diameter_lib:micro_diff(T3, T2)}. %% Inbound
%% recv/2
recv(Sock, Id) ->
receive
- ?SCTP(Sock, {[#sctp_sndrcvinfo{assoc_id = Id}], Bin}) ->
+ ?SCTP(Sock, {[#sctp_sndrcvinfo{assoc_id = I}], Bin})
+ when is_binary(Bin) ->
+ Id = I, %% assert
Bin;
- T -> %% eg. 'DOWN'
+ ?SCTP(S, _) ->
+ Sock = S, %% assert
+ recv(Sock, Id);
+ T ->
exit(T)
end.
@@ -325,7 +326,7 @@ send(Sock, Id, Bin) ->
%% mark/1
mark(Bin) ->
- Info = term_to_binary(now()),
+ Info = term_to_binary(diameter_util:timestamp()),
<<Info/binary, Bin/binary>>.
%% unmark/1
diff --git a/lib/diameter/test/diameter_gen_tcp_SUITE.erl b/lib/diameter/test/diameter_gen_tcp_SUITE.erl
index 7e232edb44..4b542e0156 100644
--- a/lib/diameter/test/diameter_gen_tcp_SUITE.erl
+++ b/lib/diameter/test/diameter_gen_tcp_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2013. All Rights Reserved.
+%% Copyright Ericsson AB 2014-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -18,10 +18,10 @@
%%
%%
-%% Some gen_sctp-specific tests demonstrating problems that were
+%% Some gen_tcp-specific tests demonstrating problems that were
%% encountered during diameter development but have nothing
-%% specifically to do with diameter. At least one of them can cause
-%% diameter_traffic_SUITE testcases to fail.
+%% specifically to do with diameter. These can cause testcases in
+%% other suites to fail.
%%
-module(diameter_gen_tcp_SUITE).
@@ -30,7 +30,8 @@
all/0]).
%% testcases
--export([send_long/1]).
+-export([send_long/1,
+ connect/1]).
-define(LOOPBACK, {127,0,0,1}).
-define(GEN_OPTS, [binary, {active, true}, {ip, ?LOOPBACK}]).
@@ -41,7 +42,8 @@ suite() ->
[{timetrap, {minutes, 2}}].
all() ->
- [send_long].
+ [connect, %% Appears to fail only when run first.
+ send_long].
%% ===========================================================================
@@ -87,15 +89,6 @@ connect(PortNr, LPid) ->
LPid ! {self(), fun(B) -> send(Sock, B) end},
down(LPid).
-%% down/1
-
-down(Pid)
- when is_pid(Pid) ->
- down(erlang:monitor(process, Pid));
-
-down(MRef) ->
- receive {'DOWN', MRef, process, _, Reason} -> Reason end.
-
%% send/2
%%
%% Send from a spawned process just to avoid sending from the
@@ -104,3 +97,47 @@ down(MRef) ->
send(Sock, Bin) ->
{_, MRef} = spawn_monitor(fun() -> exit(gen_tcp:send(Sock, Bin)) end),
down(MRef).
+
+%% ===========================================================================
+
+%% connect/1
+%%
+%% Test that simultaneous connections succeed. This fails sporadically
+%% on OS X at the time of writing, when gen_tcp:connect/3 returns
+%% {error, econnreset}.
+
+connect(_) ->
+ {ok, LSock} = gen_tcp:listen(0, ?GEN_OPTS),
+ {ok, {_,PortNr}} = inet:sockname(LSock),
+ Count = lists:seq(1,8), %% 8 simultaneous connects
+ As = [gen_accept(LSock) || _ <- Count],
+ %% Wait for spawned processes to have called gen_tcp:accept/1
+ %% (presumably).
+ receive after 2000 -> ok end,
+ Cs = [gen_connect(PortNr) || _ <- Count],
+ [] = failures(Cs),
+ [] = failures(As).
+
+failures(Monitors) ->
+ [RC || {_, MRef} <- Monitors, RC <- [down(MRef)], ok /= element(1, RC)].
+
+gen_accept(LSock) ->
+ spawn_monitor(fun() ->
+ exit(gen_tcp:accept(LSock))
+ end).
+
+gen_connect(PortNr) ->
+ spawn_monitor(fun() ->
+ exit(gen_tcp:connect(?LOOPBACK, PortNr, ?GEN_OPTS))
+ end).
+
+%% ===========================================================================
+
+%% down/1
+
+down(Pid)
+ when is_pid(Pid) ->
+ down(monitor(process, Pid));
+
+down(MRef) ->
+ receive {'DOWN', MRef, process, _, Reason} -> Reason end.
diff --git a/lib/diameter/test/diameter_pool_SUITE.erl b/lib/diameter/test/diameter_pool_SUITE.erl
new file mode 100644
index 0000000000..a59cd66a2e
--- /dev/null
+++ b/lib/diameter/test/diameter_pool_SUITE.erl
@@ -0,0 +1,133 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+%% Test of the pool_size option in connecting nodes with multiple
+%% connections.
+%%
+
+-module(diameter_pool_SUITE).
+
+-export([suite/0,
+ all/0,
+ init_per_testcase/2,
+ end_per_testcase/2,
+ init_per_suite/1,
+ end_per_suite/1]).
+
+%% testcases
+-export([tcp_connect/1,
+ sctp_connect/1,
+ any_connect/1]).
+
+%% ===========================================================================
+
+-define(util, diameter_util).
+
+%% Config for diameter:start_service/2.
+-define(SERVICE(Host),
+ [{'Origin-Host', Host ++ ".ericsson.com"},
+ {'Origin-Realm', "ericsson.com"},
+ {'Host-IP-Address', [{127,0,0,1}]},
+ {'Vendor-Id', 12345},
+ {'Product-Name', "OTP/diameter"},
+ {'Auth-Application-Id', [0]}, %% common
+ {'Acct-Application-Id', [3]}, %% accounting
+ {restrict_connections, false},
+ {application, [{alias, common},
+ {dictionary, diameter_gen_base_rfc6733},
+ {module, diameter_callback}]},
+ {application, [{alias, accounting},
+ {dictionary, diameter_gen_acct_rfc6733},
+ {module, diameter_callback}]}]).
+
+%% ===========================================================================
+
+suite() ->
+ [{timetrap, {seconds, 30}}].
+
+all() ->
+ [tcp_connect,
+ sctp_connect,
+ any_connect].
+
+init_per_testcase(_Name, Config) ->
+ Config.
+
+end_per_testcase(_Name, _Config) ->
+ diameter:stop().
+
+init_per_suite(Config) ->
+ [{sctp, ?util:have_sctp()} | Config].
+
+end_per_suite(_Config) ->
+ ok.
+
+%% ===========================================================================
+
+tcp_connect(_Config) ->
+ connect(tcp, tcp).
+
+sctp_connect(Config) ->
+ case lists:member({sctp, true}, Config) of
+ true -> connect(sctp, sctp);
+ false -> {skip, no_sctp}
+ end.
+
+any_connect(_Config) ->
+ connect(any, tcp).
+
+%% connect/2
+
+%% Establish multiple connections between a client and server.
+connect(ClientProt, ServerProt) ->
+ ok = diameter:start(),
+ [] = [{S,T} || S <- ["server", "client"],
+ T <- [diameter:start_service(S, ?SERVICE(S))],
+ T /= ok],
+ %% Listen with a single transport with pool_size = 4. Ensure the
+ %% expected number of transport processes are started.
+ LRef = ?util:listen("server", ServerProt, [{pool_size, 4}]),
+ {4,0} = count("server", LRef, accept), %% 4 transports, no connections
+ %% Establish 5 connections.
+ Ref = ?util:connect("client", ClientProt, LRef, [{pool_size, 5}]),
+ {5,5} = count("client", Ref, pool), %% 5 connections
+ %% Ensure the server has started replacement transports within a
+ %% reasonable time. Sleep, since there's no guarantee the
+ %% replacements have been started before the client has received
+ %% 'up' events. (Although it's likely.)
+ sleep(),
+ {9,5} = count("server", LRef, accept), %% 5 connections + 4 accepting
+ %% Ensure there are still the expected number of accepting transports
+ %% after stopping the client service.
+ ok = diameter:stop_service("client"),
+ sleep(),
+ {4,0} = count("server", LRef, accept), %% 4 transports, no connections
+ %% Done.
+ ok = diameter:stop_service("server").
+
+count(Name, Ref, Key) ->
+ [{transport, [[{ref, Ref} | T]]},
+ {connections, Cs}]
+ = diameter:service_info(Name, [transport, connections]),
+ {Key, Ps} = lists:keyfind(Key, 1, T),
+ {length(Ps), length(Cs)}. %% number of processes, connections
+
+sleep() ->
+ receive after 1000 -> ok end.
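For context, pool_size is a transport option and is given when the transport is added; a hedged sketch of doing so directly with diameter:add_transport/2, outside the diameter_util helpers used above (service name, address and port are placeholders):

    %% Maintain a pool of 5 outbound connections to the peer.
    add_pooled_connect(SvcName, RAddr, RPort) ->
        diameter:add_transport(SvcName,
                               {connect, [{transport_module, diameter_tcp},
                                          {transport_config, [{raddr, RAddr},
                                                              {rport, RPort}]},
                                          {pool_size, 5}]}).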
diff --git a/lib/diameter/test/diameter_traffic_SUITE.erl b/lib/diameter/test/diameter_traffic_SUITE.erl
index 4b67372016..9822b95301 100644
--- a/lib/diameter/test/diameter_traffic_SUITE.erl
+++ b/lib/diameter/test/diameter_traffic_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -414,12 +414,13 @@ send_eval(Config) ->
= call(Config, Req).
%% Send an accounting ACR that the server tries to answer with an
-%% inappropriate header, resulting in no answer being sent and the
-%% request timing out.
+%% inappropriate header. That the error is detected is coded in
+%% handle_answer.
send_bad_answer(Config) ->
Req = ['ACR', {'Accounting-Record-Type', ?EVENT_RECORD},
{'Accounting-Record-Number', 2}],
- {timeout, _} = call(Config, Req).
+ ?answer_message(?SUCCESS)
+ = call(Config, Req).
%% Send an ACR that the server callback answers explicitly with a
%% protocol error.
@@ -759,7 +760,7 @@ call(Config, Req, Opts) ->
diameter:call(?CLIENT,
dict(Req, Dict0),
msg(Req, ReqEncoding, Dict0),
- [{extra, [{Name, Group}, now()]} | Opts]).
+ [{extra, [{Name, Group}, diameter_lib:now()]} | Opts]).
origin({A,C}) ->
2*codec(A) + container(C);
@@ -1057,15 +1058,12 @@ answer(Pkt, Req, _Peer, Name, #group{client_dict0 = Dict0}) ->
[R | Vs] = Dict:'#get-'(answer(Ans, Es, Name)),
[Dict:rec2msg(R) | Vs].
-answer(Rec, [_|_], N)
- when N == send_long_avp_length;
- N == send_short_avp_length;
- N == send_zero_avp_length;
- N == send_invalid_avp_length;
- N == send_invalid_reject;
- N == send_unknown_short_mandatory;
- N == send_unexpected_mandatory_decode ->
+%% An inappropriate E-bit results in a decode error ...
+answer(Rec, Es, send_bad_answer) ->
+ [{5004, #diameter_avp{name = 'Result-Code'}} | _] = Es,
Rec;
+
+%% ... while other errors are reflected in Failed-AVP.
answer(Rec, [], _) ->
Rec.
@@ -1078,8 +1076,10 @@ app(Req, _, Dict0) ->
%% handle_error/6
handle_error(timeout = Reason, _Req, ?CLIENT, _Peer, _, Time) ->
- Now = now(),
- {Reason, {Time, Now, timer:now_diff(Now, Time)}};
+ Now = diameter_lib:now(),
+ {Reason, {diameter_lib:timestamp(Time),
+ diameter_lib:timestamp(Now),
+ diameter_lib:micro_diff(Now, Time)}};
handle_error(Reason, _Req, ?CLIENT, _Peer, _, _Time) ->
{error, Reason}.
diff --git a/lib/diameter/test/diameter_transport_SUITE.erl b/lib/diameter/test/diameter_transport_SUITE.erl
index fcffa69c24..f098851bea 100644
--- a/lib/diameter/test/diameter_transport_SUITE.erl
+++ b/lib/diameter/test/diameter_transport_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -53,7 +53,7 @@
%% Receive a message.
-define(RECV(Pat, Ret), receive Pat -> Ret end).
--define(RECV(Pat), ?RECV(Pat, now())).
+-define(RECV(Pat), ?RECV(Pat, diameter_util:timestamp())).
%% Sockets are opened on the loopback address.
-define(ADDR, {127,0,0,1}).
@@ -104,7 +104,7 @@ tc() ->
reconnect].
init_per_suite(Config) ->
- [{sctp, have_sctp()} | Config].
+ [{sctp, ?util:have_sctp()} | Config].
end_per_suite(_Config) ->
ok.
@@ -127,7 +127,10 @@ tcp_accept(_) ->
accept(tcp).
sctp_accept(Config) ->
- if_sctp(fun accept/1, Config).
+ case lists:member({sctp, true}, Config) of
+ true -> accept(sctp);
+ false -> {skip, no_sctp}
+ end.
%% Start multiple accepting transport processes that are connected to
%% with an equal number of connecting processes using gen_tcp/sctp
@@ -157,7 +160,10 @@ tcp_connect(_) ->
connect(tcp).
sctp_connect(Config) ->
- if_sctp(fun connect/1, Config).
+ case lists:member({sctp, true}, Config) of
+ true -> connect(sctp);
+ false -> {skip, no_sctp}
+ end.
connect(Prot) ->
T = {Prot, make_ref()},
@@ -219,7 +225,7 @@ reconnect(_) ->
|| T <- [listen, connect]]).
start_service(SvcName) ->
- OH = io_lib:format("~p-~p-~p", tuple_to_list(now())),
+ OH = diameter_util:unique_string(),
Opts = [{application, [{dictionary, diameter_gen_base_rfc6733},
{module, diameter_callback}]},
{'Origin-Host', OH},
@@ -251,28 +257,6 @@ abort(SvcName, LRef, Ref)
%% ===========================================================================
%% ===========================================================================
-%% have_sctp/0
-
-have_sctp() ->
- case gen_sctp:open() of
- {ok, Sock} ->
- gen_sctp:close(Sock),
- true;
- {error, E} when E == eprotonosupport;
- E == esocktnosupport -> %% fail on any other reason
- false
- end.
-
-%% if_sctp/2
-
-if_sctp(F, Config) ->
- case proplists:get_value(sctp, Config) of
- true ->
- F(sctp);
- false ->
- {skip, no_sctp}
- end.
-
%% init/2
init(accept, {Prot, Ref}) ->
@@ -351,7 +335,7 @@ make_msg() ->
%% crypto:rand_bytes/1 isn't available on all platforms (since openssl
%% isn't) so roll our own.
rand_bytes(N) ->
- random:seed(now()),
+ random:seed(diameter_util:seed()),
rand_bytes(N, <<>>).
rand_bytes(0, Bin) ->
@@ -381,37 +365,14 @@ start_connect(tcp, T, Svc, Opts) ->
diameter_tcp:start(T, Svc, Opts).
%% start_accept/2
-%%
-%% Start transports sequentially by having each wait for a message
-%% from a job in a queue before commencing. Only one transport with a
-%% pending accept is started at a time since diameter_{tcp,sctp}
-%% currently assume (and diameter currently implements) this.
start_accept(Prot, Ref) ->
- Pid = sync(accept, Ref),
{Mod, Opts} = tmod(Prot),
-
- try
- {ok, TPid, [?ADDR]} = Mod:start({accept, Ref},
- ?SVC([?ADDR]),
- [{port, 0} | Opts]),
- ?RECV(?TMSG({TPid, connected})),
- TPid
- after
- Pid ! Ref
- end.
-
-sync(What, Ref) ->
- ok = diameter_sync:cast({?MODULE, What, Ref},
- [fun lock/2, Ref, self()],
- infinity,
- infinity),
- receive {start, Ref, Pid} -> Pid end.
-
-lock(Ref, Pid) ->
- Pid ! {start, Ref, self()},
- erlang:monitor(process, Pid),
- Ref = receive T -> T end.
+ {ok, TPid, [?ADDR]} = Mod:start({accept, Ref},
+ ?SVC([?ADDR]),
+ [{port, 0} | Opts]),
+ ?RECV(?TMSG({TPid, connected})),
+ TPid.
tmod(sctp) ->
{diameter_sctp, [{sctp_initmsg, ?SCTP_INIT}]};
@@ -454,7 +415,7 @@ gen_accept(tcp, LSock) ->
gen_send(sctp, Sock, Bin) ->
{OS, _IS, Id} = getr(assoc),
- {_, _, Us} = now(),
+ {_, _, Us} = diameter_util:timestamp(),
gen_sctp:send(Sock, Id, Us rem OS, Bin);
gen_send(tcp, Sock, Bin) ->
gen_tcp:send(Sock, Bin).
@@ -463,7 +424,11 @@ gen_send(tcp, Sock, Bin) ->
gen_recv(sctp, Sock) ->
{_OS, _IS, Id} = getr(assoc),
- ?RECV(?SCTP(Sock, {[#sctp_sndrcvinfo{assoc_id = Id}], Bin}), Bin);
+ receive
+ ?SCTP(Sock, {[#sctp_sndrcvinfo{assoc_id = Id}], Bin})
+ when is_binary(Bin) ->
+ Bin
+ end;
gen_recv(tcp, Sock) ->
tcp_recv(Sock, <<>>).
diff --git a/lib/diameter/test/diameter_util.erl b/lib/diameter/test/diameter_util.erl
index 92c72c84e7..c496876ee1 100644
--- a/lib/diameter/test/diameter_util.erl
+++ b/lib/diameter/test/diameter_util.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -29,7 +29,11 @@
run/1,
fold/3,
foldl/3,
- scramble/1]).
+ scramble/1,
+ timestamp/0,
+ seed/0,
+ unique_string/0,
+ have_sctp/0]).
%% diameter-specific
-export([lport/2,
@@ -174,7 +178,7 @@ scramble(L) ->
[[fun s/1, L]]).
s(L) ->
- random:seed(now()),
+ random:seed(seed()),
s([], L).
s(Acc, []) ->
@@ -184,6 +188,44 @@ s(Acc, L) ->
s([T|Acc], H ++ Rest).
%% ---------------------------------------------------------------------------
+%% timestamp/0
+
+timestamp() ->
+ diameter_lib:timestamp(diameter_lib:now()).
+
+%% ---------------------------------------------------------------------------
+%% seed/0
+
+seed() ->
+ {_,T} = diameter_lib:seed(),
+ T.
+
+%% ---------------------------------------------------------------------------
+%% unique_string/0
+
+unique_string() ->
+ us(diameter_lib:now()).
+
+us({M,S,U}) ->
+ tl(lists:append(["-" ++ integer_to_list(N) || N <- [M,S,U]]));
+
+us(MonoT) ->
+ integer_to_list(MonoT).
+
+%% ---------------------------------------------------------------------------
+%% have_sctp/0
+
+have_sctp() ->
+ case gen_sctp:open() of
+ {ok, Sock} ->
+ gen_sctp:close(Sock),
+ true;
+ {error, E} when E == eprotonosupport;
+ E == esocktnosupport -> %% fail on any other reason
+ false
+ end.
+
+%% ---------------------------------------------------------------------------
%% eval/1
%%
%% Evaluate a function in one of a number of forms.
@@ -254,13 +296,12 @@ path(Config, Name) ->
%%
%% Lookup the port number of a tcp/sctp listening transport.
-lport(M, {Node, Ref}) ->
- rpc:call(Node, ?MODULE, lport, [M, Ref]);
+lport(Prot, {Node, Ref}) ->
+ rpc:call(Node, ?MODULE, lport, [Prot, Ref]);
lport(Prot, Ref) ->
- Mod = tmod(Prot),
[_] = diameter_reg:wait({'_', listener, {Ref, '_'}}),
- [N || {listen, N, _} <- Mod:ports(Ref)].
+ [N || M <- tmod(Prot), {listen, N, _} <- M:ports(Ref)].
%% ---------------------------------------------------------------------------
%% listen/2-3
@@ -292,13 +333,17 @@ connect(Client, Prot, LRef, Opts) ->
Ref = add_transport(Client, {connect, opts(Prot, PortNr) ++ Opts}),
true = transport(Client, Ref), %% assert
- ok = receive
- {diameter_event, Client, {up, Ref, _, _, _}} -> ok
- after 10000 ->
- {Client, Prot, PortNr, process_info(self(), messages)}
- end,
+ diameter_lib:for_n(fun(_) -> ok = up(Client, Ref, Prot, PortNr) end,
+ proplists:get_value(pool_size, Opts, 1)),
Ref.
+up(Client, Ref, Prot, PortNr) ->
+ receive
+ {diameter_event, Client, {up, Ref, _, _, _}} -> ok
+ after 10000 ->
+ {Client, Prot, PortNr, process_info(self(), messages)}
+ end.
+
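The diameter_lib:for_n/2 call above is used only for its side effect of waiting for one 'up' event per pooled connection; assuming for_n(Fun, N) simply applies Fun N times, it amounts to roughly this sketch:

    %% Apply F to 1..N for its side effects (lists:seq(1, 0) is []).
    for_n(F, N) when is_integer(N), 0 =< N ->
        lists:foreach(F, lists:seq(1, N)).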
transport(SvcName, Ref) ->
[Ref] == [R || [{ref, R} | _] <- diameter:service_info(SvcName, transport),
R == Ref].
@@ -327,13 +372,15 @@ add_transport(SvcName, T) ->
Ref.
tmod(tcp) ->
- diameter_tcp;
+ [diameter_tcp];
tmod(sctp) ->
- diameter_sctp.
+ [diameter_sctp];
+tmod(any) ->
+ [diameter_sctp, diameter_tcp].
opts(Prot, T) ->
- [{transport_module, tmod(Prot)},
- {transport_config, [{ip, ?ADDR}, {port, 0} | opts(T)]}].
+ [{transport_module, M} || M <- tmod(Prot)]
+ ++ [{transport_config, [{ip, ?ADDR}, {port, 0} | opts(T)]}].
opts(listen) ->
[{accept, M} || M <- [{256,0,0,1}, ["256.0.0.1", ["^.+$"]]]];
diff --git a/lib/diameter/test/diameter_watchdog_SUITE.erl b/lib/diameter/test/diameter_watchdog_SUITE.erl
index b6e8730ec2..5a3ff2c92f 100644
--- a/lib/diameter/test/diameter_watchdog_SUITE.erl
+++ b/lib/diameter/test/diameter_watchdog_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -420,6 +420,7 @@ suspect(TRef, false, SvcName, N) ->
%% abuse/1
abuse(F) ->
+
[] = run([[abuse, F, T] || T <- [listen, connect]]).
abuse(F, [_,_,_|_] = Args) ->
@@ -672,7 +673,8 @@ jitter(T,D) ->
%% Generate a unique hostname for the faked peer.
hostname() ->
- lists:flatten(io_lib:format("~p-~p-~p", tuple_to_list(now()))).
+ {M,S,U} = diameter_util:timestamp(),
+ lists:flatten(io_lib:format("~p-~p-~p", [M,S,U])).
putr(Key, Val) ->
put({?MODULE, Key}, Val).
diff --git a/lib/diameter/test/modules.mk b/lib/diameter/test/modules.mk
index 4fea62461c..6da96bd676 100644
--- a/lib/diameter/test/modules.mk
+++ b/lib/diameter/test/modules.mk
@@ -1,8 +1,7 @@
-#-*-makefile-*- ; force emacs to enter makefile-mode
# %CopyrightBegin%
#
-# Copyright Ericsson AB 2010-2013. All Rights Reserved.
+# Copyright Ericsson AB 2010-2015. All Rights Reserved.
#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
@@ -40,6 +39,7 @@ MODULES = \
diameter_gen_sctp_SUITE \
diameter_gen_tcp_SUITE \
diameter_length_SUITE \
+ diameter_pool_SUITE \
diameter_reg_SUITE \
diameter_relay_SUITE \
diameter_stats_SUITE \
diff --git a/lib/edoc/doc/overview.edoc b/lib/edoc/doc/overview.edoc
index 2af425272e..0ced8cab32 100644
--- a/lib/edoc/doc/overview.edoc
+++ b/lib/edoc/doc/overview.edoc
@@ -76,8 +76,6 @@ The following are the main functions for running EDoc:
<ul>
<li>{@link edoc:application/2}: Creates documentation for a
typical Erlang application.</li>
- <li>{@link edoc:packages/2}: Creates documentation for one or
- more packages, automatically locating source files.</li>
<li>{@link edoc:files/2}: Creates documentation for a
specified set of source files.</li>
<li>{@link edoc:run/3}: General interface function; the common
@@ -184,7 +182,7 @@ The following tags can be used anywhere within a module:
path (see {@link edoc:read_source/2}).</dd>
<dt><a name="gtag-todo">`@todo' (or `@TODO')</a></dt>
- <dd>Attaches a To-Do note to a function, module, package, or
+ <dd>Attaches a To-Do note to a function, module or
overview-page. The content can be any XHTML text describing
the issue, e.g.:
```%% @TODO Finish writing the documentation.'''
@@ -338,7 +336,7 @@ The following tags can be used before a module declaration:
<dt><a name="mtag-since">`@since'</a></dt>
<dd>Specifies when the module was introduced, with respect to
- the application, package, release or distribution it is part
+ the application, release or distribution it is part
of. The content can be arbitrary text.</dd>
<dt><a name="mtag-version">`@version'</a></dt>
@@ -445,7 +443,6 @@ possible formats for references are:
<table border="1" summary="reference syntax">
<tr><th>Reference syntax</th><th>Example</th><th>Scope</th></tr>
<tr><td>`Module'</td><td>{@link edoc_run}, `erl.lang.list'</td><td>Global</td></tr>
- <tr><td>`Package.*'</td><td>`erl.lang.*'</td><td>Global</td></tr>
<tr><td>`Function/Arity'</td><td>`file/2'</td><td>Within module</td></tr>
<tr><td>`Module:Function/Arity'</td><td>{@link edoc:application/2}</td><td>Global</td></tr>
<tr><td>`Type()'</td><td>`filename()'</td><td>Within module</td></tr>
@@ -531,7 +528,7 @@ after the empty line into separate paragraphs. For example:
```%% @doc This will all be part of the first paragraph.
%% It can stretch over several lines and contain <em>any
%% XHTML markup</em>.
- %%
+ %%
%% This is the second paragraph. The above line is
%% regarded as "empty" by EDoc, even though it ends with
%% a space.'''
@@ -685,17 +682,6 @@ information. User-defined macros override predefined macros.
<dd>Expands to the current date, as "<tt>Month Day Year</tt>",
e.g. "{@date}".</dd>
- <dt><a name="predefmacro-docRoot"><code>@{@docRoot}</code></a></dt>
- <dd>Expands to the relative URL path (such as
- `"../../.."') from the current page to the root
- directory of the generated documentation. This can be used to
- create XHTML references such as `<img
- src="@{@docRoot}/images/logo.jpeg">' that are independent of how
- deep down in a package structure they occur. If packages are not
- used (i.e., if all modules are in the "empty" package),
- <code>@{@docRoot}</code> will always resolve to the empty
- string.</dd>
-
<dt><a name="predefmacro-link"><code>@{@link <em>reference</em>.
<em>description</em>}</code></a></dt>
<dd>This creates a hypertext link; cf. the
@@ -710,9 +696,6 @@ information. User-defined macros override predefined macros.
<dd>Expands to the name of the current module. Only defined when a
module is being processed.</dd>
- <dt><a name="predefmacro-package"><code>@{@package}</code></a></dt>
- <dd>Expands to the name of the current package.</dd>
-
<dt><a name="predefmacro-section"><code>@{@section
<em>heading</em>}</code></a></dt>
<dd>Expands to a hypertext link to the specified section heading;
diff --git a/lib/edoc/include/edoc_doclet.hrl b/lib/edoc/include/edoc_doclet.hrl
index 60ec7f44e4..ac6763fb33 100644
--- a/lib/edoc/include/edoc_doclet.hrl
+++ b/lib/edoc/include/edoc_doclet.hrl
@@ -1,6 +1,6 @@
%% =====================================================================
%% Header file for EDoc doclet modules.
-%%
+%%
%% Copyright (C) 2001-2004 Richard Carlsson
%%
%% This library is free software; you can redistribute it and/or modify
@@ -43,16 +43,11 @@
%% @type doclet_gen() = #doclet_gen{sources = [string()],
%% app = no_app() | atom(),
-%% packages = [atom()],
-%% modules = [atom()],
-%% modules = [atom()],
-%% filemap = function()}
+%% modules = [atom()]}
-record(doclet_gen, {sources = [],
app = ?NO_APP,
- packages = [],
- modules = [],
- filemap
+ modules = []
}).
%% @type doclet_toc() = #doclet_gen{paths = [string()],
diff --git a/lib/edoc/priv/edoc.dtd b/lib/edoc/priv/edoc.dtd
index ba4ac0db28..4278a9e643 100644
--- a/lib/edoc/priv/edoc.dtd
+++ b/lib/edoc/priv/edoc.dtd
@@ -2,20 +2,13 @@
<!-- EDoc DTD Version 0.3 -->
<!ELEMENT overview (title, description?, author*, copyright?, version?,
- since?, see*, reference*, todo?, packages, modules)>
+ since?, see*, reference*, todo?, modules)>
<!ATTLIST overview
root CDATA #IMPLIED
encoding CDATA #IMPLIED>
<!ELEMENT title (#PCDATA)>
-<!ELEMENT package (description?, author*, copyright?, version?,
- since?, deprecated?, see*, reference*, todo?,
- modules)>
-<!ATTLIST package
- name CDATA #REQUIRED
- root CDATA #IMPLIED>
-
<!ELEMENT modules (module+)>
diff --git a/lib/edoc/priv/stylesheet.css b/lib/edoc/priv/stylesheet.css
index e426a90483..ab170c091f 100644
--- a/lib/edoc/priv/stylesheet.css
+++ b/lib/edoc/priv/stylesheet.css
@@ -27,10 +27,10 @@ div.spec {
margin-left: 2em;
background-color: #eeeeee;
}
-a.module,a.package {
+a.module {
text-decoration:none
}
-a.module:hover,a.package:hover {
+a.module:hover {
background-color: #eeeeee;
}
ul.definitions {
diff --git a/lib/edoc/src/edoc.erl b/lib/edoc/src/edoc.erl
index 983f04e8b6..78915e8943 100644
--- a/lib/edoc/src/edoc.erl
+++ b/lib/edoc/src/edoc.erl
@@ -24,12 +24,11 @@
%% TODO: option for ignoring functions matching some pattern ('..._test_'/0)
%% TODO: @private_type tag, opaque unless generating private docs?
%% TODO: document the record type syntax
-%% TODO: some 'skip' option for ignoring particular modules/packages?
-%% TODO: intermediate-level packages: document even if no local sources.
+%% TODO: some 'skip' option for ignoring particular modules?
%% TODO: multiline comment support (needs modified comment representation)
%% TODO: config-file for default settings
%% TODO: config: locations of all local docdirs; generate local doc-index page
-%% TODO: config: URL:s of offline packages/apps
+%% TODO: config: URL:s of offline apps
%% TODO: config: default stylesheet
%% TODO: config: default header/footer, etc.
%% TODO: offline linkage
@@ -45,10 +44,10 @@
-module(edoc).
--export([packages/1, packages/2, files/1, files/2,
+-export([files/1, files/2,
application/1, application/2, application/3,
toc/1, toc/2, toc/3,
- run/3,
+ run/2,
file/1, file/2,
read/1, read/2,
layout/1, layout/2,
@@ -68,15 +67,15 @@
file(Name) ->
file(Name, []).
-%% @spec file(filename(), proplist()) -> ok
+%% @spec file(filename(), proplist()) -> ok
%%
%% @type filename() = //kernel/file:filename()
%% @type proplist() = [term()]
%%
%% @deprecated This is part of the old interface to EDoc and is mainly
%% kept for backwards compatibility. The preferred way of generating
-%% documentation is through one of the functions {@link application/2},
-%% {@link packages/2} and {@link files/2}.
+%% documentation is through one of the functions {@link application/2}
+%% and {@link files/2}.
%%
%% @doc Reads a source code file and outputs formatted documentation to
%% a corresponding file.
@@ -121,44 +120,24 @@ file(Name, Options) ->
?DEFAULT_FILE_SUFFIX),
Dir = proplists:get_value(dir, Options, filename:dirname(Name)),
Encoding = [{encoding, edoc_lib:read_encoding(Name, [])}],
- edoc_lib:write_file(Text, Dir, BaseName ++ Suffix, '', Encoding).
+ edoc_lib:write_file(Text, Dir, BaseName ++ Suffix, Encoding).
-%% TODO: better documentation of files/1/2, packages/1/2, application/1/2/3
+%% TODO: better documentation of files/1/2, application/1/2/3
-%% @spec (Files::[filename() | {package(), [filename()]}]) -> ok
-%% @equiv packages(Packages, [])
+%% @spec (Files::[filename()]) -> ok
files(Files) ->
files(Files, []).
-%% @spec (Files::[filename() | {package(), [filename()]}],
+%% @spec (Files::[filename()],
%% Options::proplist()) -> ok
-%% @doc Runs EDoc on a given set of source files. See {@link run/3} for
+%% @doc Runs EDoc on a given set of source files. See {@link run/2} for
%% details, including options.
%% @equiv run([], Files, Options)
files(Files, Options) ->
- run([], Files, Options).
-
-%% @spec (Packages::[package()]) -> ok
-%% @equiv packages(Packages, [])
-
-packages(Packages) ->
- packages(Packages, []).
-
-%% @spec (Packages::[package()], Options::proplist()) -> ok
-%% @type package() = atom() | string()
-%%
-%% @doc Runs EDoc on a set of packages. The `source_path' option is used
-%% to locate the files; see {@link run/3} for details, including
-%% options. This function automatically appends the current directory to
-%% the source path.
-%%
-%% @equiv run(Packages, [], Options)
-
-packages(Packages, Options) ->
- run(Packages, [], Options ++ [{source_path, [?CURRENT_DIR]}]).
+ run(Files, Options).
%% @spec (Application::atom()) -> ok
%% @equiv application(Application, [])
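With packages/1,2 removed, files/2 and application/2,3 remain the documented entry points; a brief usage sketch (the application name and paths are placeholders, not taken from the diff):

    %% Document an explicit file list into ./doc, or a whole
    %% application rooted at a directory.
    build_docs() ->
        ok = edoc:files(["src/my_module.erl"], [{dir, "doc"}]),
        ok = edoc:application(my_app, "/path/to/my_app", []).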
@@ -194,7 +173,7 @@ application(App, Options) when is_atom(App) ->
%% subdirectory, if it exists, or otherwise in the application
%% directory itself.
%% </li>
-%% <li>The {@link run/3. `subpackages'} option is turned on. All found
+%% <li>The {@link run/2. `subpackages'} option is turned on. All found
%% source files will be processed.
%% </li>
%% <li>The `include' subdirectory is automatically added to the
@@ -203,7 +182,7 @@ application(App, Options) when is_atom(App) ->
%% </li>
%% </ul>
%%
-%% See {@link run/3} for details, including options.
+%% See {@link run/2} for details, including options.
%%
%% @see application/2
@@ -219,7 +198,7 @@ application(App, Dir, Options) when is_atom(App) ->
{includes, [filename:join(Dir, "include")]}],
Opts1 = set_app_default(App, Dir, Opts),
%% Recursively document all subpackages of '' - i.e., everything.
- run([''], [], [{application, App} | Opts1]).
+ run([], [{application, App} | Opts1]).
%% Try to set up a default application base URI in a smart way if the
%% user has not specified it explicitly.
@@ -240,31 +219,20 @@ set_app_default(App, Dir0, Opts) ->
Opts
end.
-%% If no source files are found for a (specified) package, no package
-%% documentation will be generated either (even if there is a
-%% package-documentation file). This is the way it should be. For
-%% specified files, use empty package (unless otherwise specified). The
-%% assumed package is always used for creating the output. If the actual
-%% module or package of the source differs from the assumption gathered
-%% from the path and file name, a warning should be issued (since links
-%% are likely to be incorrect).
-
opt_defaults() ->
- [packages].
+ [].
opt_negations() ->
[{no_preprocess, preprocess},
{no_subpackages, subpackages},
- {no_report_missing_types, report_missing_types},
- {no_packages, packages}].
+ {no_report_missing_types, report_missing_types}].
-%% @spec run(Packages::[package()],
-%% Files::[filename() | {package(), [filename()]}],
+%% @spec run(Files::[filename()],
%% Options::proplist()) -> ok
-%% @doc Runs EDoc on a given set of source files and/or packages. Note
+%% @doc Runs EDoc on a given set of source files. Note
%% that the doclet plugin module has its own particular options; see the
%% `doclet' option below.
-%%
+%%
%% Also see {@link layout/2} for layout-related options, and
%% {@link get_doc/2} for options related to reading source
%% files.
@@ -298,11 +266,6 @@ opt_negations() ->
%% The default doclet module is {@link edoc_doclet}; see {@link
%% edoc_doclet:run/2} for doclet-specific options.
%% </dd>
-%% <dt>{@type {exclude_packages, [package()]@}}
-%% </dt>
-%% <dd>Lists packages to be excluded from the documentation. Typically
-%% used in conjunction with the `subpackages' option.
-%% </dd>
%% <dt>{@type {file_suffix, string()@}}
%% </dt>
%% <dd>Specifies the suffix used for output files. The default value is
@@ -314,22 +277,6 @@ opt_negations() ->
%% target directory will be ignored and overwritten. The default
%% value is `false'.
%% </dd>
-%% <dt>{@type {packages, boolean()@}}
-%% </dt>
-%% <dd>If the value is `true', it it assumed that packages (module
-%% namespaces) are being used, and that the source code directory
-%% structure reflects this. The default value is `true'. (Usually,
-%% this does the right thing even if all the modules belong to the
-%% top-level "empty" package.) `no_packages' is an alias for
-%% `{packages, false}'. See the `subpackages' option below for
-%% further details.
-%%
-%% If the source code is organized in a hierarchy of
-%% subdirectories although it does not use packages, use
-%% `no_packages' together with the recursive-search `subpackages'
-%% option (on by default) to automatically generate documentation
-%% for all the modules.
-%% </dd>
%% <dt>{@type {source_path, [filename()]@}}
%% </dt>
%% <dd>Specifies a list of file system paths used to locate the source
@@ -345,7 +292,7 @@ opt_negations() ->
%% <dd>If the value is `true', all subpackages of specified packages
%% will also be included in the documentation. The default value is
%% `false'. `no_subpackages' is an alias for `{subpackages,
-%% false}'. See also the `exclude_packages' option.
+%% false}'.
%%
%% Subpackage source files are found by recursively searching
%% for source code files in subdirectories of the known source code
@@ -358,38 +305,31 @@ opt_negations() ->
%% </dl>
%%
%% @see files/2
-%% @see packages/2
%% @see application/2
%% NEW-OPTIONS: source_path, application
%% INHERIT-OPTIONS: init_context/1
%% INHERIT-OPTIONS: expand_sources/2
%% INHERIT-OPTIONS: target_dir_info/5
-%% INHERIT-OPTIONS: edoc_lib:find_sources/3
+%% INHERIT-OPTIONS: edoc_lib:find_sources/2
%% INHERIT-OPTIONS: edoc_lib:run_doclet/2
%% INHERIT-OPTIONS: edoc_lib:get_doc_env/4
-run(Packages, Files, Opts0) ->
+run(Files, Opts0) ->
Opts = expand_opts(Opts0),
Ctxt = init_context(Opts),
Dir = Ctxt#context.dir,
Path = proplists:append_values(source_path, Opts),
- Ss = sources(Path, Packages, Opts),
+ Ss = sources(Path, Opts),
{Ss1, Ms} = expand_sources(expand_files(Files) ++ Ss, Opts),
- Ps = [P || {_, P, _, _} <- Ss1],
App = proplists:get_value(application, Opts, ?NO_APP),
- {App1, Ps1, Ms1} = target_dir_info(Dir, App, Ps, Ms, Opts),
- %% The "empty package" is never included in the list of packages.
- Ps2 = edoc_lib:unique(lists:sort(Ps1)) -- [''],
+ {App1, Ms1} = target_dir_info(Dir, App, Ms, Opts),
Ms2 = edoc_lib:unique(lists:sort(Ms1)),
- Fs = package_files(Path, Ps2),
- Env = edoc_lib:get_doc_env(App1, Ps2, Ms2, Opts),
+ Env = edoc_lib:get_doc_env(App1, Ms2, Opts),
Ctxt1 = Ctxt#context{env = Env},
Cmd = #doclet_gen{sources = Ss1,
app = App1,
- packages = Ps2,
- modules = Ms2,
- filemap = Fs
+ modules = Ms2
},
F = fun (M) ->
M:run(Cmd, Ctxt1)
@@ -401,42 +341,22 @@ expand_opts(Opts0) ->
Opts0 ++ opt_defaults()).
%% NEW-OPTIONS: dir
-%% DEFER-OPTIONS: run/3
+%% DEFER-OPTIONS: run/2
init_context(Opts) ->
#context{dir = proplists:get_value(dir, Opts, ?CURRENT_DIR),
opts = Opts
}.
-%% INHERIT-OPTIONS: edoc_lib:find_sources/3
-
-sources(Path, Packages, Opts) ->
- lists:foldl(fun (P, Xs) ->
- edoc_lib:find_sources(Path, P, Opts) ++ Xs
- end,
- [], Packages).
-
-package_files(Path, Packages) ->
- Name = ?PACKAGE_FILE, % this is hard-coded for now
- D = lists:foldl(fun (P, D) ->
- F = edoc_lib:find_file(Path, P, Name),
- dict:store(P, F, D)
- end,
- dict:new(), Packages),
- fun (P) ->
- case dict:find(P, D) of
- {ok, F} -> F;
- error -> ""
- end
- end.
+%% INHERIT-OPTIONS: edoc_lib:find_sources/2
+
+sources(Path, Opts) ->
+ edoc_lib:find_sources(Path, Opts).
%% Expand user-specified sets of files.
-expand_files([{P, Fs1} | Fs]) ->
- [{P, filename:basename(F), filename:dirname(F)} || F <- Fs1]
- ++ expand_files(Fs);
expand_files([F | Fs]) ->
- [{'', filename:basename(F), filename:dirname(F)} |
+ [{filename:basename(F), filename:dirname(F)} |
expand_files(Fs)];
expand_files([]) ->
[].
@@ -444,26 +364,23 @@ expand_files([]) ->
%% Create the (assumed) full module names. Keep only the first source
%% for each module, but preserve the order of the list.
-%% NEW-OPTIONS: source_suffix, packages
-%% DEFER-OPTIONS: run/3
+%% NEW-OPTIONS: source_suffix
+%% DEFER-OPTIONS: run/2
expand_sources(Ss, Opts) ->
Suffix = proplists:get_value(source_suffix, Opts,
?DEFAULT_SOURCE_SUFFIX),
- Ss1 = case proplists:get_bool(packages, Opts) of
- true -> Ss;
- false -> [{'',F,D} || {_P,F,D} <- Ss]
- end,
+ Ss1 = [{F,D} || {F,D} <- Ss],
expand_sources(Ss1, Suffix, sets:new(), [], []).
-expand_sources([{'', F, D} | Fs], Suffix, S, As, Ms) ->
+expand_sources([{F, D} | Fs], Suffix, S, As, Ms) ->
M = list_to_atom(filename:rootname(F, Suffix)),
case sets:is_element(M, S) of
true ->
expand_sources(Fs, Suffix, S, As, Ms);
false ->
S1 = sets:add_element(M, S),
- expand_sources(Fs, Suffix, S1, [{M, '', F, D} | As],
+ expand_sources(Fs, Suffix, S1, [{M, F, D} | As],
[M | Ms])
end;
expand_sources([], _Suffix, _S, As, Ms) ->
@@ -471,16 +388,15 @@ expand_sources([], _Suffix, _S, As, Ms) ->
%% NEW-OPTIONS: new
-target_dir_info(Dir, App, Ps, Ms, Opts) ->
+target_dir_info(Dir, App, Ms, Opts) ->
case proplists:get_bool(new, Opts) of
true ->
- {App, Ps, Ms};
+ {App, Ms};
false ->
- {App1, Ps1, Ms1} = edoc_lib:read_info_file(Dir),
+ {App1, Ms1} = edoc_lib:read_info_file(Dir),
{if App == ?NO_APP -> App1;
true -> App
end,
- Ps ++ Ps1,
Ms ++ Ms1}
end.
@@ -510,7 +426,7 @@ toc(Dir, Opts) ->
toc(Dir, Paths, Opts0) ->
Opts = expand_opts(Opts0 ++ [{dir, Dir}]),
Ctxt = init_context(Opts),
- Env = edoc_lib:get_doc_env('', [], [], Opts),
+ Env = edoc_lib:get_doc_env('', [], Opts),
Ctxt1 = Ctxt#context{env = Env},
F = fun (M) ->
M:run(#doclet_toc{paths=Paths}, Ctxt1)
@@ -562,7 +478,7 @@ layout(Doc) ->
%% </dl>
%%
%% @see layout/1
-%% @see run/3
+%% @see run/2
%% @see read/2
%% @see file/2
@@ -856,7 +772,7 @@ get_doc(File) ->
%% edoc_lib:get_doc_env/4} for further options.
%%
%% @see get_doc/3
-%% @see run/3
+%% @see run/2
%% @see edoc_extract:source/5
%% @see read/2
%% @see layout/2
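A minimal usage sketch of the two-argument interface documented above; it is
not part of the patch, and the directory and file names are made up. Only
options named in the option list above are used:

    %% Run EDoc over an explicit list of source files plus whatever the
    %% recursive source-path search finds (hypothetical paths).
    Opts = [{dir, "doc"},
            {source_path, ["src"]},
            {subpackages, true},
            {source_suffix, ".erl"},
            {file_suffix, ".html"}],
    edoc:run(["src/extra.erl"], Opts).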
diff --git a/lib/edoc/src/edoc.hrl b/lib/edoc/src/edoc.hrl
index 44c5d6fef4..5b0fb68cf9 100644
--- a/lib/edoc/src/edoc.hrl
+++ b/lib/edoc/src/edoc.hrl
@@ -1,6 +1,6 @@
%% =====================================================================
%% Header file for EDoc
-%%
+%%
%% Copyright (C) 2001-2004 Richard Carlsson
%%
%% This library is free software; you can redistribute it and/or modify
@@ -25,9 +25,7 @@
-define(APPLICATION, edoc).
-define(INFO_FILE, "edoc-info").
--define(PACKAGE_FILE, "package.edoc").
-define(OVERVIEW_FILE, "overview.edoc").
--define(PACKAGE_SUMMARY, "package-summary").
-define(DEFAULT_SOURCE_SUFFIX, ".erl").
-define(DEFAULT_FILE_SUFFIX, ".html").
-define(DEFAULT_DOCLET, edoc_doclet).
@@ -65,13 +63,10 @@
%% Environment for generating documentation data
-record(env, {module = [],
- package = [],
root = "",
file_suffix,
- package_summary,
apps,
modules,
- packages,
app_default,
macros = [],
includes = []
diff --git a/lib/edoc/src/edoc_data.erl b/lib/edoc/src/edoc_data.erl
index eceb5cb1bd..b797d74a71 100644
--- a/lib/edoc/src/edoc_data.erl
+++ b/lib/edoc/src/edoc_data.erl
@@ -26,7 +26,7 @@
-module(edoc_data).
--export([module/4, package/4, overview/4, type/2]).
+-export([module/4, overview/4, type/2]).
-export([hidden_filter/2, get_all_tags/1]).
@@ -510,41 +510,14 @@ get_tags(_, []) -> [].
type(T, Env) ->
xmerl_lib:expand_element({type, [edoc_types:to_xml(T, Env)]}).
-%% <!ELEMENT package (description?, author*, copyright?, version?,
-%% since?, deprecated?, see*, reference*, todo?,
-%% modules)>
-%% <!ATTLIST package
-%% name CDATA #REQUIRED
-%% root CDATA #IMPLIED>
-%% <!ELEMENT modules (module+)>
-
-package(Package, Tags, Env, Opts) ->
- Env1 = Env#env{package = Package,
- root = edoc_refs:relative_package_path('', Package)},
- xmerl_lib:expand_element(package_1(Package, Tags, Env1, Opts)).
-
-package_1(Package, Tags, Env, Opts) ->
- {package, [{root, Env#env.root}],
- ([{packageName, [atom_to_list(Package)]}]
- ++ get_doc(Tags)
- ++ authors(Tags)
- ++ get_copyright(Tags)
- ++ get_version(Tags)
- ++ get_since(Tags)
- ++ get_deprecated(Tags)
- ++ sees(Tags, Env)
- ++ references(Tags)
- ++ todos(Tags, Opts))
- }.
-
%% <!ELEMENT overview (title, description?, author*, copyright?, version?,
-%% since?, see*, reference*, todo?, packages, modules)>
+%% since?, see*, reference*, todo?, modules)>
%% <!ATTLIST overview
%% root CDATA #IMPLIED>
%% <!ELEMENT title (#PCDATA)>
overview(Title, Tags, Env, Opts) ->
- Env1 = Env#env{package = '',
+ Env1 = Env#env{
root = ""},
xmerl_lib:expand_element(overview_1(Title, Tags, Env1, Opts)).
diff --git a/lib/edoc/src/edoc_doclet.erl b/lib/edoc/src/edoc_doclet.erl
index 5653b5894b..5961ca8cc0 100644
--- a/lib/edoc/src/edoc_doclet.erl
+++ b/lib/edoc/src/edoc_doclet.erl
@@ -42,9 +42,7 @@
-define(DEFAULT_FILE_SUFFIX, ".html").
-define(INDEX_FILE, "index.html").
-define(OVERVIEW_FILE, "overview.edoc").
--define(PACKAGE_SUMMARY, "package-summary.html").
-define(OVERVIEW_SUMMARY, "overview-summary.html").
--define(PACKAGES_FRAME, "packages-frame.html").
-define(MODULES_FRAME, "modules-frame.html").
-define(STYLESHEET, "stylesheet.css").
-define(IMAGE, "erlang.png").
@@ -52,11 +50,10 @@
-include_lib("xmerl/include/xmerl.hrl").
-%% Sources is the list of inputs in the order they were found. Packages
-%% and Modules are sorted lists of atoms without duplicates. (They
+%% Sources is the list of inputs in the order they were found.
+%% Modules are sorted lists of atoms without duplicates. (They
%% usually include the data from the edoc-info file in the target
-%% directory, if it exists.) Note that the "empty package" is never
-%% included in Packages!
+%% directory, if it exists.)
%% @spec (Command::doclet_gen() | doclet_toc(), edoc_context()) -> ok
%% @doc Main doclet entry point. See the file <a
@@ -117,14 +114,12 @@
run(#doclet_gen{}=Cmd, Ctxt) ->
gen(Cmd#doclet_gen.sources,
Cmd#doclet_gen.app,
- Cmd#doclet_gen.packages,
Cmd#doclet_gen.modules,
- Cmd#doclet_gen.filemap,
Ctxt);
run(#doclet_toc{}=Cmd, Ctxt) ->
toc(Cmd#doclet_toc.paths, Ctxt).
-gen(Sources, App, Packages, Modules, FileMap, Ctxt) ->
+gen(Sources, App, Modules, Ctxt) ->
Dir = Ctxt#context.dir,
Env = Ctxt#context.env,
Options = Ctxt#context.opts,
@@ -132,11 +127,9 @@ gen(Sources, App, Packages, Modules, FileMap, Ctxt) ->
CSS = stylesheet(Options),
{Modules1, Error} = sources(Sources, Dir, Modules, Env, Options),
modules_frame(Dir, Modules1, Title, CSS),
- packages(Packages, Dir, FileMap, Env, Options),
- packages_frame(Dir, Packages, Title, CSS),
overview(Dir, Title, Env, Options),
- index_file(Dir, length(Packages) > 1, Title),
- edoc_lib:write_info_file(App, Packages, Modules1, Dir),
+ index_file(Dir, Title),
+ edoc_lib:write_info_file(App, Modules1, Dir),
copy_stylesheet(Dir, Options),
copy_image(Dir),
%% handle postponed error during processing of source files
@@ -182,19 +175,19 @@ sources(Sources, Dir, Modules, Env, Options) ->
%% set if it was successful. Errors are just flagged at this stage,
%% allowing all source files to be processed even if some of them fail.
-source({M, P, Name, Path}, Dir, Suffix, Env, Set, Private, Hidden,
+source({M, Name, Path}, Dir, Suffix, Env, Set, Private, Hidden,
Error, Options) ->
File = filename:join(Path, Name),
case catch {ok, edoc:get_doc(File, Env, Options)} of
{ok, {Module, Doc}} ->
- check_name(Module, M, P, File),
+ check_name(Module, M, File),
case ((not is_private(Doc)) orelse Private)
andalso ((not is_hidden(Doc)) orelse Hidden) of
true ->
Text = edoc:layout(Doc, Options),
Name1 = atom_to_list(M) ++ Suffix,
Encoding = [{encoding,encoding(Doc)}],
- edoc_lib:write_file(Text, Dir, Name1, P, Encoding),
+ edoc_lib:write_file(Text, Dir, Name1, Encoding),
{sets:add_element(Module, Set), Error};
false ->
{Set, Error}
@@ -204,8 +197,7 @@ source({M, P, Name, Path}, Dir, Suffix, Env, Set, Private, Hidden,
{Set, true}
end.
-check_name(M, M0, P0, File) ->
- P = '',
+check_name(M, M0, File) ->
N = M,
N0 = M0,
case N of
@@ -222,47 +214,12 @@ check_name(M, M0, P0, File) ->
ok
end
end,
- if P =/= P0 ->
- warning("file '~ts' belongs to package '~s', not '~s'.",
- [File, P, P0]);
- true ->
- ok
- end.
-
-
-%% Generating the summary files for packages.
-
-%% INHERIT-OPTIONS: read_file/4
-%% INHERIT-OPTIONS: edoc_lib:run_layout/2
-
-packages(Packages, Dir, FileMap, Env, Options) ->
- lists:foreach(fun (P) ->
- package(P, Dir, FileMap, Env, Options)
- end,
- Packages).
-
-package(P, Dir, FileMap, Env, Opts) ->
- Tags = case FileMap(P) of
- "" ->
- [];
- File ->
- read_file(File, package, Env, Opts)
- end,
- Data = edoc_data:package(P, Tags, Env, Opts),
- F = fun (M) ->
- M:package(Data, Opts)
- end,
- Text = edoc_lib:run_layout(F, Opts),
- edoc_lib:write_file(Text, Dir, ?PACKAGE_SUMMARY, P).
-
+ ok.
%% Creating an index file, with some frames optional.
%% TODO: get rid of frames, or change doctype to Frameset
-index_file(Dir, Packages, Title) ->
- Frame1 = {frame, [{src,?PACKAGES_FRAME},
- {name,"packagesFrame"},{title,""}],
- []},
+index_file(Dir, Title) ->
Frame2 = {frame, [{src,?MODULES_FRAME},
{name,"modulesFrame"},{title,""}],
[]},
@@ -270,16 +227,7 @@ index_file(Dir, Packages, Title) ->
{name,"overviewFrame"},{title,""}],
[]},
Frameset = {frameset, [{cols,"20%,80%"}],
- case Packages of
- true ->
- [?NL,
- {frameset, [{rows,"30%,70%"}],
- [?NL, Frame1, ?NL, Frame2, ?NL]}
- ];
- false ->
- [?NL, Frame2, ?NL]
- end
- ++ [?NL, Frame3, ?NL,
+ [?NL, Frame2, ?NL, ?NL, Frame3, ?NL,
{noframes,
[?NL,
{h2, ["This page uses frames"]},
@@ -296,24 +244,6 @@ index_file(Dir, Packages, Title) ->
Text = xmerl:export_simple([XML], xmerl_html, []),
edoc_lib:write_file(Text, Dir, ?INDEX_FILE).
-packages_frame(Dir, Ps, Title, CSS) ->
- Body = [?NL,
- {h2, [{class, "indextitle"}], ["Packages"]},
- ?NL,
- {table, [{width, "100%"}, {border, 0},
- {summary, "list of packages"}],
- lists:concat(
- [[?NL,
- {tr, [{td, [], [{a, [{href, package_ref(P)},
- {target,"overviewFrame"},
- {class, "package"}],
- [atom_to_list(P)]}]}]}]
- || P <- Ps])},
- ?NL],
- XML = xhtml(Title, CSS, Body),
- Text = xmerl:export_simple([XML], xmerl_html, []),
- edoc_lib:write_file(Text, Dir, ?PACKAGES_FRAME).
-
modules_frame(Dir, Ms, Title, CSS) ->
Body = [?NL,
{h2, [{class, "indextitle"}], ["Modules"]},
@@ -334,11 +264,7 @@ modules_frame(Dir, Ms, Title, CSS) ->
edoc_lib:write_file(Text, Dir, ?MODULES_FRAME).
module_ref(M) ->
- edoc_refs:relative_package_path(M, '') ++ ?DEFAULT_FILE_SUFFIX.
-
-package_ref(P) ->
- edoc_lib:join_uri(edoc_refs:relative_package_path(P, ''),
- ?PACKAGE_SUMMARY).
+ atom_to_list(M) ++ ?DEFAULT_FILE_SUFFIX.
xhtml(Title, CSS, Content) ->
xhtml_1(Title, CSS, {body, [{bgcolor, "white"}], Content}).
@@ -372,7 +298,7 @@ overview(Dir, Title, Env, Opts) ->
end,
Text = edoc_lib:run_layout(F, Opts),
EncOpts = [{encoding,Encoding}],
- edoc_lib:write_file(Text, Dir, ?OVERVIEW_SUMMARY, '', EncOpts).
+ edoc_lib:write_file(Text, Dir, ?OVERVIEW_SUMMARY, EncOpts).
copy_image(Dir) ->
case code:priv_dir(?EDOC_APP) of
@@ -505,7 +431,7 @@ app_index_file(Paths, Dir, Env, Options) ->
% Priv = proplists:get_bool(private, Options),
CSS = stylesheet(Options),
Apps1 = [{filename:dirname(A),filename:basename(A)} || A <- Paths],
- index_file(Dir, false, Title),
+ index_file(Dir, Title),
application_frame(Dir, Apps1, Title, CSS),
modules_frame(Dir, [], Title, CSS),
overview(Dir, Title, Env, Options),
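For orientation, a hedged sketch of the data shapes the doclet works with once
the package dimension is gone; the module and directory names are invented:

    %% expand_sources/2 in edoc.erl now produces {Module, FileName, Dir}
    %% triples, and #doclet_gen{} carries only sources, app and modules.
    Sources = [{a, "a.erl", "src"},
               {b, "b.erl", "src/src_1"}],
    %% module_ref/1 simply appends the file suffix, so links point at
    %% files in the flat target directory:
    "a.html" = atom_to_list(a) ++ ".html".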
diff --git a/lib/edoc/src/edoc_extract.erl b/lib/edoc/src/edoc_extract.erl
index 67a95e80aa..6d34de3a85 100644
--- a/lib/edoc/src/edoc_extract.erl
+++ b/lib/edoc/src/edoc_extract.erl
@@ -121,10 +121,8 @@ source1(Tree, File0, Env, Opts, TypeDocs) ->
Module = get_module_info(Tree, File),
{Header, Footer, Entries} = collect(Forms, Module),
Name = Module#module.name,
- Package = '',
Env1 = Env#env{module = Name,
- package = Package,
- root = edoc_refs:relative_package_path('', Package)},
+ root = ""},
Env2 = add_macro_defs(module_macros(Env1), Opts, Env1),
Entries1 = get_tags([Header, Footer | Entries], Env2, File, TypeDocs),
Entries2 = edoc_specs:add_data(Entries1, Opts, File, Module),
@@ -218,7 +216,7 @@ add_macro_defs(Defs0, Opts, Env) ->
%% @spec file(File::filename(), Context, Env::edoc_env(),
%% Options::proplist()) -> {ok, Tags} | {error, Reason}
-%% Context = overview | package
+%% Context = overview
%% Tags = [term()]
%% Reason = term()
%%
@@ -249,7 +247,7 @@ file(File, Context, Env, Opts) ->
%% @spec (Text::string(), Context, Env::edoc_env(),
%% Options::proplist()) -> Tags
-%% Context = overview | package
+%% Context = overview
%% Tags = [term()]
%%
%% @doc Returns the list of tags in the text. Any lines of text before
@@ -353,8 +351,6 @@ preprocess_forms_2(F, Fs) ->
[F | preprocess_forms_1(Fs)];
{function, _} ->
[F | preprocess_forms_1(Fs)];
- {rule, _} ->
- [F | preprocess_forms_1(Fs)];
{attribute, {module, _}} ->
[F | preprocess_forms_1(Fs)];
text ->
@@ -392,15 +388,6 @@ collect([F | Fs], Cs, Ss, Ts, As, Header, Mod) ->
export = Export,
data = {comment_text(Cs),Ss,Ts}} | As],
Header, Mod);
- {rule, Name} ->
- L = erl_syntax:get_pos(F),
- Export = ordsets:is_element(Name, Mod#module.exports),
- Args = parameters(erl_syntax:rule_clauses(F)),
- collect(Fs, [], [], [],
- [#entry{name = Name, args = Args, line = L,
- export = Export,
- data = {comment_text(Cs),Ss,Ts}} | As],
- Header, Mod);
{attribute, {module, _}} when Header =:= undefined ->
L = erl_syntax:get_pos(F),
collect(Fs, [], [], [], As,
diff --git a/lib/edoc/src/edoc_layout.erl b/lib/edoc/src/edoc_layout.erl
index 36d067d9bc..6309e88475 100644
--- a/lib/edoc/src/edoc_layout.erl
+++ b/lib/edoc/src/edoc_layout.erl
@@ -27,7 +27,7 @@
-module(edoc_layout).
--export([module/2, package/2, overview/2, type/1]).
+-export([module/2, overview/2, type/1]).
-import(edoc_report, [report/2]).
@@ -978,9 +978,6 @@ get_text(Name, Es) ->
local_label(R) ->
"#" ++ R.
-xhtml(Title, CSS, Body) ->
- xhtml(Title, CSS, Body, "latin1").
-
xhtml(Title, CSS, Body, Encoding) ->
EncString = case Encoding of
"latin1" -> "ISO-8859-1";
@@ -1010,27 +1007,6 @@ type(E, Ds) ->
xmerl:export_simple_content(t_utype_elem(E) ++ local_defs(Ds, Opts),
?HTML_EXPORT).
-package(E=#xmlElement{name = package, content = Es}, Options) ->
- Opts = init_opts(E, Options),
- Name = get_text(packageName, Es),
- Title = ["Package ", Name],
- Desc = get_content(description, Es),
-% ShortDesc = get_content(briefDescription, Desc),
- FullDesc = get_content(fullDescription, Desc),
- Body = ([?NL, {h1, [Title]}, ?NL]
-% ++ ShortDesc
- ++ copyright(Es)
- ++ deprecated(Es, "package")
- ++ version(Es)
- ++ since(Es)
- ++ authors(Es)
- ++ references(Es)
- ++ sees(Es)
- ++ todos(Es)
- ++ FullDesc),
- XML = xhtml(Title, stylesheet(Opts), Body),
- xmerl:export_simple(XML, ?HTML_EXPORT, []).
-
overview(E=#xmlElement{name = overview, content = Es}, Options) ->
Opts = init_opts(E, Options),
Title = [get_text(title, Es)],
diff --git a/lib/edoc/src/edoc_lib.erl b/lib/edoc/src/edoc_lib.erl
index c46338a2e1..813fcf2476 100644
--- a/lib/edoc/src/edoc_lib.erl
+++ b/lib/edoc/src/edoc_lib.erl
@@ -29,9 +29,9 @@
get_first_sentence/1, is_space/1, strip_space/1, parse_expr/2,
parse_contact/2, escape_uri/1, join_uri/2, is_relative_uri/1,
is_name/1, to_label/1, find_doc_dirs/0, find_sources/2,
- find_sources/3, find_file/3, try_subdir/2, unique/1,
- write_file/3, write_file/4, write_file/5, write_info_file/4,
- read_info_file/1, get_doc_env/1, get_doc_env/4, copy_file/2,
+ find_file/2, try_subdir/2, unique/1,
+ write_file/3, write_file/4, write_info_file/3,
+ read_info_file/1, get_doc_env/1, get_doc_env/3, copy_file/2,
uri_get/1, run_doclet/2, run_layout/2,
simplify_path/1, timestr/1, datestr/1, read_encoding/2]).
@@ -266,13 +266,6 @@ is_name_1([$_ | Cs]) ->
is_name_1([]) -> true;
is_name_1(_) -> false.
-to_atom(A) when is_atom(A) -> A;
-to_atom(S) when is_list(S) -> list_to_atom(S).
-
-to_list(A) when is_atom(A) -> atom_to_list(A);
-to_list(S) when is_list(S) -> S.
-
-
%% @private
unique([X | Xs]) -> [X | unique(Xs, X)];
unique([]) -> [].
@@ -674,7 +667,7 @@ simplify_path(P) ->
try_subdir(Dir, Subdir) ->
D = filename:join(Dir, Subdir),
case filelib:is_dir(D) of
- true -> D;
+ true -> D;
false -> Dir
end.
@@ -686,19 +679,10 @@ try_subdir(Dir, Subdir) ->
%% @private
write_file(Text, Dir, Name) ->
- write_file(Text, Dir, Name, '').
-
-%% @spec (Text::deep_string(), Dir::edoc:filename(),
-%% Name::edoc:filename(), Package::atom()|string()) -> ok
-%% @doc Like {@link write_file/3}, but adds path components to the target
-%% directory corresponding to the specified package.
-%% @private
+ write_file(Text, Dir, Name, [{encoding,latin1}]).
-write_file(Text, Dir, Name, Package) ->
- write_file(Text, Dir, Name, Package, [{encoding,latin1}]).
-
-write_file(Text, Dir, Name, Package, Options) ->
- File = filename:join([Dir, to_list(Package), Name]),
+write_file(Text, Dir, Name, Options) ->
+ File = filename:join([Dir, Name]),
ok = filelib:ensure_dir(File),
case file:open(File, [write] ++ Options) of
{ok, FD} ->
@@ -711,15 +695,14 @@ write_file(Text, Dir, Name, Package, Options) ->
end.
%% @private
-write_info_file(App, Packages, Modules, Dir) ->
- Ts = [{packages, Packages},
- {modules, Modules}],
+write_info_file(App, Modules, Dir) ->
+ Ts = [{modules, Modules}],
Ts1 = if App =:= ?NO_APP -> Ts;
true -> [{application, App} | Ts]
end,
S0 = [io_lib:fwrite("~p.\n", [T]) || T <- Ts1],
S = ["%% encoding: UTF-8\n" | S0],
- write_file(S, Dir, ?INFO_FILE, '', [{encoding,unicode}]).
+ write_file(S, Dir, ?INFO_FILE, [{encoding,unicode}]).
%% @spec (Name::edoc:filename()) -> {ok, string()} | {error, Reason}
%%
@@ -744,9 +727,8 @@ read_file(File) ->
info_file_data(Ts) ->
App = proplists:get_value(application, Ts, ?NO_APP),
- Ps = proplists:append_values(packages, Ts),
Ms = proplists:append_values(modules, Ts),
- {App, Ps, Ms}.
+ {App, Ms}.
%% Local file access - don't complain if file does not exist.
@@ -761,10 +743,10 @@ read_info_file(Dir) ->
{error, R} ->
R1 = file:format_error(R),
warning("could not read '~ts': ~ts.", [File, R1]),
- {?NO_APP, [], []}
- end;
+ {?NO_APP, []}
+ end;
false ->
- {?NO_APP, [], []}
+ {?NO_APP, []}
end.
%% URI access
@@ -776,7 +758,7 @@ uri_get_info_file(Base) ->
parse_info_file(Text, URI);
{error, Msg} ->
warning("could not read '~ts': ~ts.", [URI, Msg]),
- {?NO_APP, [], []}
+ {?NO_APP, []}
end.
parse_info_file(Text, Name) ->
@@ -785,10 +767,10 @@ parse_info_file(Text, Name) ->
info_file_data(Vs);
{error, eof} ->
warning("unexpected end of file in '~ts'.", [Name]),
- {?NO_APP, [], []};
+ {?NO_APP, []};
{error, {_Line,Module,R}} ->
warning("~ts: ~ts.", [Module:format_error(R), Name]),
- {?NO_APP, [], []}
+ {?NO_APP, []}
end.
parse_terms(Text) ->
@@ -815,82 +797,67 @@ parse_terms_1([], _As, _Vs) ->
%% ---------------------------------------------------------------------
-%% Source files and packages
-
-%% @private
-find_sources(Path, Opts) ->
- find_sources(Path, "", Opts).
+%% Source files
-%% @doc See {@link edoc:run/3} for a description of the options
-%% `subpackages', `source_suffix' and `exclude_packages'.
+%% @doc See {@link edoc:run/2} for a description of the options
+%% `subpackages' and `source_suffix'.
%% @private
-%% NEW-OPTIONS: subpackages, source_suffix, exclude_packages
-%% DEFER-OPTIONS: edoc:run/3
+%% NEW-OPTIONS: subpackages, source_suffix
+%% DEFER-OPTIONS: edoc:run/2
-find_sources(Path, Pkg, Opts) ->
+find_sources(Path, Opts) ->
Rec = proplists:get_bool(subpackages, Opts),
Ext = proplists:get_value(source_suffix, Opts, ?DEFAULT_SOURCE_SUFFIX),
- find_sources(Path, Pkg, Rec, Ext, Opts).
+ find_sources(Path, Rec, Ext, Opts).
-find_sources(Path, Pkg, Rec, Ext, Opts) ->
- Skip = proplists:get_value(exclude_packages, Opts, []),
- lists:flatten(find_sources_1(Path, to_atom(Pkg), Rec, Ext, Skip)).
+find_sources(Path, Rec, Ext, _Opts) ->
+ lists:flatten(find_sources_1(Path, Rec, Ext)).
-find_sources_1([P | Ps], Pkg, Rec, Ext, Skip) ->
- Dir = filename:join(P, atom_to_list(Pkg)),
- Fs1 = find_sources_1(Ps, Pkg, Rec, Ext, Skip),
+find_sources_1([P | Ps], Rec, Ext) ->
+ Dir = P,
+ Fs1 = find_sources_1(Ps, Rec, Ext),
case filelib:is_dir(Dir) of
true ->
- [find_sources_2(Dir, Pkg, Rec, Ext, Skip) | Fs1];
+ [find_sources_2(Dir, Rec, Ext) | Fs1];
false ->
Fs1
end;
-find_sources_1([], _Pkg, _Rec, _Ext, _Skip) ->
+find_sources_1([], _Rec, _Ext) ->
[].
-find_sources_2(Dir, Pkg, Rec, Ext, Skip) ->
- case lists:member(Pkg, Skip) of
- false ->
- Es = list_dir(Dir, false), % just warn if listing fails
- Es1 = [{Pkg, E, Dir} || E <- Es, is_source_file(E, Ext)],
- case Rec of
+find_sources_2(Dir, Rec, Ext) ->
+ Es = list_dir(Dir, false), % just warn if listing fails
+ Es1 = [{E, Dir} || E <- Es, is_source_file(E, Ext)],
+ case Rec of
true ->
- [find_sources_3(Es, Dir, Pkg, Rec, Ext, Skip) | Es1];
+ [find_sources_3(Es, Dir, Rec, Ext) | Es1];
false ->
- Es1
- end;
- true ->
- []
- end.
+ Es1
+ end.
-find_sources_3(Es, Dir, Pkg, Rec, Ext, Skip) ->
+find_sources_3(Es, Dir, Rec, Ext) ->
[find_sources_2(filename:join(Dir, E),
- to_atom(join(Pkg, E)), Rec, Ext, Skip)
- || E <- Es, is_package_dir(E, Dir)].
-
-join('', E) -> E;
-join(Pkg, E) -> filename:join(Pkg, E).
+ Rec, Ext)
+ || E <- Es, is_source_dir(E, Dir)].
is_source_file(Name, Ext) ->
(filename:extension(Name) == Ext)
andalso is_name(filename:rootname(Name, Ext)).
-is_package_dir(Name, Dir) ->
- is_name(filename:rootname(filename:basename(Name)))
- andalso filelib:is_dir(filename:join(Dir, Name)).
+is_source_dir(Name, Dir) ->
+ filelib:is_dir(filename:join(Dir, Name)).
%% @private
-find_file([P | Ps], []=Pkg, Name) ->
- Pkg = [],
+find_file([P | Ps], Name) ->
File = filename:join(P, Name),
case filelib:is_file(File) of
true ->
- File;
+ File;
false ->
- find_file(Ps, Pkg, Name)
- end;
-find_file([], [], _Name) ->
+ find_file(Ps, Name)
+ end;
+find_file([], _Name) ->
"".
%% @private
@@ -909,7 +876,7 @@ find_doc_dirs([P0 | Ps]) ->
File = filename:join(Dir, ?INFO_FILE),
case filelib:is_file(File) of
true ->
- [Dir | find_doc_dirs(Ps)];
+ [Dir | find_doc_dirs(Ps)];
false ->
find_doc_dirs(Ps)
end;
@@ -923,22 +890,21 @@ find_doc_dirs([]) ->
%% NEW-OPTIONS: doc_path
%% DEFER-OPTIONS: get_doc_env/4
-get_doc_links(App, Packages, Modules, Opts) ->
+get_doc_links(App, Modules, Opts) ->
Path = proplists:append_values(doc_path, Opts) ++ find_doc_dirs(),
Ds = [{P, uri_get_info_file(P)} || P <- Path],
- Ds1 = [{"", {App, Packages, Modules}} | Ds],
+ Ds1 = [{"", {App, Modules}} | Ds],
D = dict:new(),
- make_links(Ds1, D, D, D).
+ make_links(Ds1, D, D).
-make_links([{Dir, {App, Ps, Ms}} | Ds], A, P, M) ->
+make_links([{Dir, {App, Ms}} | Ds], A, M) ->
A1 = if App == ?NO_APP -> A;
true -> add_new(App, Dir, A)
end,
F = fun (K, D) -> add_new(K, Dir, D) end,
- P1 = lists:foldl(F, P, Ps),
M1 = lists:foldl(F, M, Ms),
- make_links(Ds, A1, P1, M1);
-make_links([], A, P, M) ->
+ make_links(Ds, A1, M1);
+make_links([], A, M) ->
F = fun (D) ->
fun (K) ->
case dict:find(K, D) of
@@ -947,7 +913,7 @@ make_links([], A, P, M) ->
end
end
end,
- {F(A), F(P), F(M)}.
+ {F(A), F(M)}.
add_new(K, V, D) ->
case dict:is_key(K, D) of
@@ -962,11 +928,10 @@ add_new(K, V, D) ->
%% @private
get_doc_env(Opts) ->
- get_doc_env([], [], [], Opts).
+ get_doc_env([], [], Opts).
-%% @spec (App, Packages, Modules, Options::proplist()) -> edoc_env()
+%% @spec (App, Modules, Options::proplist()) -> edoc_env()
%% App = [] | atom()
-%% Packages = [atom()]
%% Modules = [atom()]
%% proplist() = [term()]
%%
@@ -985,17 +950,15 @@ get_doc_env(Opts) ->
%% INHERIT-OPTIONS: get_doc_links/4
%% DEFER-OPTIONS: edoc:run/3
-get_doc_env(App, Packages, Modules, Opts) ->
+get_doc_env(App, Modules, Opts) ->
Suffix = proplists:get_value(file_suffix, Opts,
?DEFAULT_FILE_SUFFIX),
AppDefault = proplists:get_value(app_default, Opts, ?APP_DEFAULT),
Includes = proplists:append_values(includes, Opts),
- {A, P, M} = get_doc_links(App, Packages, Modules, Opts),
+ {A, M} = get_doc_links(App, Modules, Opts),
#env{file_suffix = Suffix,
- package_summary = ?PACKAGE_SUMMARY ++ Suffix,
apps = A,
- packages = P,
modules = M,
app_default = AppDefault,
includes = Includes
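A short sketch of the simplified edoc_lib calls after this change; the
application and module names are placeholders, not taken from the patch:

    %% write_file/4 takes the target directory, a plain file name and the
    %% open options; no package path component is inserted any more.
    edoc_lib:write_file("<html/>", "doc", "index.html",
                        [{encoding, latin1}]),

    %% write_info_file/3 writes an edoc-info file of roughly this form:
    %%   %% encoding: UTF-8
    %%   {application, myapp}.
    %%   {modules, [a, b]}.
    edoc_lib:write_info_file(myapp, [a, b], "doc"),

    %% get_doc_env/3 no longer takes a packages argument.
    Env = edoc_lib:get_doc_env(myapp, [a, b], []).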
diff --git a/lib/edoc/src/edoc_macros.erl b/lib/edoc/src/edoc_macros.erl
index 8efbfd00c7..bdcb3fe81f 100644
--- a/lib/edoc/src/edoc_macros.erl
+++ b/lib/edoc/src/edoc_macros.erl
@@ -40,10 +40,6 @@ std_macros(Env) ->
true -> [{module, atom_to_list(Env#env.module)}]
end
++
- if Env#env.package =:= [] -> [];
- true -> [{package, atom_to_list(Env#env.package)}]
- end
- ++
[{date, fun date_macro/3},
{docRoot, Env#env.root},
{link, fun link_macro/3},
diff --git a/lib/edoc/src/edoc_parser.yrl b/lib/edoc/src/edoc_parser.yrl
index c6f8a04775..48c01c8dce 100644
--- a/lib/edoc/src/edoc_parser.yrl
+++ b/lib/edoc/src/edoc_parser.yrl
@@ -28,7 +28,7 @@
Nonterminals
start spec func_type utype_list utype_tuple utypes utype ptypes ptype
nutype function_name where_defs defs defs2 def typedef etype
-throws qname ref aref mref lref pref var_list vars fields field
+throws qname ref aref mref lref var_list vars fields field
utype_map utype_map_fields utype_map_field
futype_list bin_base_type bin_unit_type.
@@ -207,14 +207,11 @@ typedef -> atom var_list '=' utype where_defs:
ref -> aref: '$1'.
ref -> mref: '$1'.
ref -> lref: '$1'.
-ref -> pref: '$1'.
aref -> '//' atom:
edoc_refs:app(tok_val('$2')).
aref -> '//' atom '/' mref:
edoc_refs:app(tok_val('$2'), '$4').
-aref -> '//' atom '/' pref:
- edoc_refs:app(tok_val('$2'), '$4').
mref -> qname ':' atom '/' integer:
edoc_refs:function(qname('$1'), tok_val('$3'), tok_val('$5')).
@@ -223,9 +220,6 @@ mref -> qname ':' atom '(' ')':
mref -> qname:
edoc_refs:module(qname('$1')).
-pref -> qname '.' '*':
- edoc_refs:package(qname('$1')).
-
lref -> atom '/' integer:
edoc_refs:function(tok_val('$1'), tok_val('$3')).
lref -> atom '(' ')':
@@ -399,7 +393,7 @@ parse_typedef_1(S, L) ->
%% @doc Parses a <a
%% href="overview-summary.html#References">reference</a> to a module,
-%% package, function, type, or application
+%% function, type, or application
parse_ref(S, L) ->
case edoc_scanner:string(S, L) of
diff --git a/lib/edoc/src/edoc_refs.erl b/lib/edoc/src/edoc_refs.erl
index ea439490ed..b9a9391053 100644
--- a/lib/edoc/src/edoc_refs.erl
+++ b/lib/edoc/src/edoc_refs.erl
@@ -27,10 +27,9 @@
-module(edoc_refs).
--export([app/1, app/2, package/1, module/1, module/2, module/3,
+-export([app/1, app/2, module/1, module/2, module/3,
function/2, function/3, function/4, type/1, type/2, type/3,
- to_string/1, to_label/1, get_uri/2, is_top/2,
- relative_module_path/2, relative_package_path/2]).
+ to_string/1, to_label/1, get_uri/2, is_top/2]).
-import(edoc_lib, [join_uri/2, escape_uri/1]).
@@ -56,9 +55,6 @@ module(M, Ref) ->
module(App, M, Ref) ->
app(App, module(M, Ref)).
-package(P) ->
- {package, P}.
-
function(F, A) ->
{function, F, A}.
@@ -88,8 +84,6 @@ to_string({module, M}) ->
atom_to_list(M) ;
to_string({module, M, Ref}) ->
atom_to_list(M) ++ ":" ++ to_string(Ref);
-to_string({package, P}) ->
- atom_to_list(P) ++ ".*";
to_string({function, F, A}) ->
atom_to_list(F) ++ "/" ++ integer_to_list(A);
to_string({type, T}) ->
@@ -111,24 +105,19 @@ get_uri({module, M, Ref}, Env) ->
module_ref(M, Env) ++ "#" ++ to_label(Ref);
get_uri({module, M}, Env) ->
module_ref(M, Env);
-get_uri({package, P}, Env) ->
- package_ref(P, Env);
get_uri(Ref, _Env) ->
"#" ++ to_label(Ref).
abs_uri({module, M}, Env) ->
module_absref(M, Env);
abs_uri({module, M, Ref}, Env) ->
- module_absref(M, Env) ++ "#" ++ to_label(Ref);
-abs_uri({package, P}, Env) ->
- package_absref(P, Env).
+ module_absref(M, Env) ++ "#" ++ to_label(Ref).
module_ref(M, Env) ->
case (Env#env.modules)(M) of
"" ->
File = atom_to_list(M) ++ Env#env.file_suffix,
- Path = relative_module_path(M, Env#env.package),
- join_uri(Path, escape_uri(File));
+ escape_uri(File);
Base ->
join_uri(Base, module_absref(M, Env))
end.
@@ -136,19 +125,6 @@ module_ref(M, Env) ->
module_absref(M, Env) ->
escape_uri(atom_to_list(M)) ++ escape_uri(Env#env.file_suffix).
-package_ref(P, Env) ->
- case (Env#env.packages)(P) of
- "" ->
- join_uri(relative_package_path(P, Env#env.package),
- escape_uri(Env#env.package_summary));
- Base ->
- join_uri(Base, package_absref(P, Env))
- end.
-
-package_absref(P, Env) ->
- join_uri(escape_uri(atom_to_list(P)),
- escape_uri(Env#env.package_summary)).
-
app_ref(A, Env) ->
case (Env#env.apps)(A) of
"" ->
@@ -166,43 +142,3 @@ is_top({app, _App}, _Env) ->
is_top(_Ref, _Env) ->
false.
-%% Each segment of a path must be separately escaped before joining.
-
-join_segments([S]) ->
- escape_uri(S);
-join_segments([S | Ss]) ->
- join_uri(escape_uri(S), join_segments(Ss)).
-
-%% 'From' is always the "current package" here:
-
-%% The empty string is returned if the To module has only one segment,
-%% implying a local reference.
-
-relative_module_path(_To, _From) ->
- "".
-
-relative_package_path(To, From) ->
- relative_path([atom_to_list(To)], [atom_to_list(From)]).
-
-%% This takes two lists of path segments (From, To). Note that an empty
-%% string will be returned if the paths are the same. Empty leading
-%% segments are stripped from both paths.
-
-relative_path(Ts, ["" | Fs]) ->
- relative_path(Ts, Fs);
-relative_path(["" | Ts], Fs) ->
- relative_path(Ts, Fs);
-relative_path(Ts, Fs) ->
- relative_path_1(Ts, Fs).
-
-relative_path_1([T | Ts], [F | Fs]) when F == T ->
- relative_path_1(Ts, Fs);
-relative_path_1(Ts, Fs) ->
- relative_path_2(Fs, Ts).
-
-relative_path_2([_F | Fs], Ts) ->
- relative_path_2(Fs, [".." | Ts]);
-relative_path_2([], []) ->
- "";
-relative_path_2([], Ts) ->
- join_segments(Ts).
diff --git a/lib/edoc/src/edoc_run.erl b/lib/edoc/src/edoc_run.erl
index b5a1ef713d..9a569d0879 100644
--- a/lib/edoc/src/edoc_run.erl
+++ b/lib/edoc/src/edoc_run.erl
@@ -17,7 +17,7 @@
%% @copyright 2003 Richard Carlsson
%% @author Richard Carlsson <[email protected]>
%% @see edoc
-%% @end
+%% @end
%% =====================================================================
%% @doc Interface for calling EDoc from Erlang startup options.
@@ -38,7 +38,7 @@
-module(edoc_run).
--export([file/1, application/1, packages/1, files/1, toc/1]).
+-export([file/1, application/1, files/1, toc/1]).
-compile({no_auto_import,[error/1]}).
@@ -92,28 +92,6 @@ files(Args) ->
end,
run(F).
-%% @spec packages([string()]) -> none()
-%%
-%% @doc Calls {@link edoc:application/2} with the corresponding
-%% arguments. The strings in the list are parsed as Erlang constant
-%% terms. The list can be either `[Packages]' or `[Packages, Options]'.
-%% In the first case {@link edoc:application/1} is called instead.
-%%
-%% The function call never returns; instead, the emulator is
-%% automatically terminated when the call has completed, signalling
-%% success or failure to the operating system.
-
-packages(Args) ->
- F = fun () ->
- case parse_args(Args) of
- [Packages] -> edoc:packages(Packages);
- [Packages, Opts] -> edoc:packages(Packages, Opts);
- _ ->
- invalid_args("edoc_run:packages/1", Args)
- end
- end,
- run(F).
-
%% @hidden Not official yet
toc(Args) ->
F = fun () ->
@@ -131,8 +109,8 @@ toc(Args) ->
%%
%% @deprecated This is part of the old interface to EDoc and is mainly
%% kept for backwards compatibility. The preferred way of generating
-%% documentation is through one of the functions {@link application/1},
-%% {@link packages/1} and {@link files/1}.
+%% documentation is through one of the functions {@link application/1}
+%% and {@link files/1}.
%%
%% @doc Calls {@link edoc:file/2} with the corresponding arguments. The
%% strings in the list are parsed as Erlang constant terms. The list can
diff --git a/lib/edoc/src/edoc_tags.erl b/lib/edoc/src/edoc_tags.erl
index 82a1b72d84..c1c453511a 100644
--- a/lib/edoc/src/edoc_tags.erl
+++ b/lib/edoc/src/edoc_tags.erl
@@ -42,7 +42,7 @@
%% Name = atom()
%% Parser = text | xml | (Text,Line,Where) -> term()
%% Flags = [Flag]
-%% Flag = module | function | package | overview | single
+%% Flag = module | function | overview | single
%%
%% Note that the pseudo-tag '@clear' is not listed here.
%% (Cf. the function 'filter_tags'.)
@@ -57,11 +57,11 @@
%% - @category (useless; superseded by keywords or free text search)
tags() ->
- All = [module,footer,function,package,overview],
- [{author, fun parse_contact/4, [module,package,overview]},
- {copyright, text, [module,package,overview,single]},
- {deprecated, xml, [module,function,package,single]},
- {doc, xml, [module,function,package,overview,single]},
+ All = [module,footer,function,overview],
+ [{author, fun parse_contact/4, [module,overview]},
+ {copyright, text, [module,overview,single]},
+ {deprecated, xml, [module,function,single]},
+ {doc, xml, [module,function,overview,single]},
{docfile, fun parse_file/4, All},
{'end', text, All},
{equiv, fun parse_expr/4, [function,single]},
@@ -69,17 +69,17 @@ tags() ->
{hidden, text, [module,function,single]},
{param, fun parse_param/4, [function]},
{private, text, [module,function,single]},
- {reference, xml, [module,footer,package,overview]},
+ {reference, xml, [module,footer,overview]},
{returns, xml, [function,single]},
- {see, fun parse_see/4, [module,function,package,overview]},
- {since, text, [module,function,package,overview,single]},
+ {see, fun parse_see/4, [module,function,overview]},
+ {since, text, [module,function,overview,single]},
{spec, fun parse_spec/4, [function,single]},
{throws, fun parse_throws/4, [function,single]},
{title, text, [overview,single]},
{'TODO', xml, All},
{todo, xml, All},
{type, fun parse_typedef/4, [module,footer,function]},
- {version, text, [module,package,overview,single]}].
+ {version, text, [module,overview,single]}].
aliases('TODO') -> todo;
aliases(return) -> returns;
@@ -369,7 +369,7 @@ parse_header(Data, Line, Env, Where) when is_list(Where) ->
{string, _, File} ->
Dir = filename:dirname(Where),
Path = Env#env.includes ++ [Dir],
- case edoc_lib:find_file(Path, "", File) of
+ case edoc_lib:find_file(Path, File) of
"" ->
throw_error(Line, {file_not_found, File});
File1 ->
diff --git a/lib/edoc/src/otpsgml_layout.erl b/lib/edoc/src/otpsgml_layout.erl
index 2c4cd919bb..052c75b9d4 100644
--- a/lib/edoc/src/otpsgml_layout.erl
+++ b/lib/edoc/src/otpsgml_layout.erl
@@ -28,7 +28,7 @@
-module(otpsgml_layout).
--export([module/2, package/2, overview/2,type/1]).
+-export([module/2, overview/2,type/1]).
-import(edoc_report, [report/2]).
@@ -811,27 +811,6 @@ xml(Title, CSS, Body) ->
xmerl:export_simple_content(t_utype_elem(E) ++ local_defs(Ds),
?SGML_EXPORT).
-
-package(E=#xmlElement{name = package, content = Es}, Options) ->
- Opts = init_opts(E, Options),
- Name = get_text(packageName, Es),
- Title = io_lib:fwrite("Package ~s", [Name]),
- Desc = get_content(description, Es),
-% ShortDesc = get_content(briefDescription, Desc),
- FullDesc = get_content(fullDescription, Desc),
- Body = ([?NL, {h1, [Title]}, ?NL]
-% ++ ShortDesc
- ++ copyright(Es)
- ++ deprecated(Es, "package")
- ++ version(Es)
- ++ since(Es)
- ++ authors(Es)
- ++ references(Es)
- ++ sees(Es)
- ++ FullDesc),
- XML = xml(Title, stylesheet(Opts), Body),
- xmerl:export_simple([XML], ?SGML_EXPORT, []).
-
overview(E=#xmlElement{name = overview, content = Es}, Options) ->
Opts = init_opts(E, Options),
Title = get_text(title, Es),
@@ -843,6 +822,7 @@ overview(E=#xmlElement{name = overview, content = Es}, Options) ->
++ copyright(Es)
++ version(Es)
++ since(Es)
+ ++ deprecated(Es, "application")
++ authors(Es)
++ references(Es)
++ sees(Es)
diff --git a/lib/edoc/test/edoc_SUITE.erl b/lib/edoc/test/edoc_SUITE.erl
index c63660c8c0..6b23054ce3 100644
--- a/lib/edoc/test/edoc_SUITE.erl
+++ b/lib/edoc/test/edoc_SUITE.erl
@@ -22,12 +22,12 @@
init_per_group/2,end_per_group/2]).
%% Test cases
--export([app/1,appup/1,build_std/1,build_map_module/1,otp_12008/1]).
+-export([app/1,appup/1,build_std/1,build_map_module/1,otp_12008/1,build_app/1]).
suite() -> [{ct_hooks,[ts_install_cth]}].
all() ->
- [app,appup,build_std,build_map_module,otp_12008].
+ [app,appup,build_std,build_map_module,otp_12008,build_app].
groups() ->
[].
@@ -95,3 +95,20 @@ otp_12008(Config) when is_list(Config) ->
ok = edoc:files([Un2], Opts2),
{'EXIT', error} = (catch edoc:files([Un3], Opts2)),
ok.
+
+build_app(suite) -> [];
+build_app(doc) -> ["Build a local app with nested source directories"];
+build_app(Config) ->
+ DataDir = ?config(data_dir, Config),
+ PrivDir = ?config(priv_dir, Config),
+ OutDir = filename:join(PrivDir, "myapp"),
+ Src = filename:join(DataDir, "myapp"),
+
+ ok = edoc:application(myapp, Src, [{dir, OutDir}, {subpackages, false}]),
+ true = filelib:is_regular(filename:join(OutDir, "a.html")),
+ false = filelib:is_regular(filename:join(OutDir, "b.html")),
+
+ ok = edoc:application(myapp, Src, [{dir, OutDir}]),
+ true = filelib:is_regular(filename:join(OutDir, "a.html")),
+ true = filelib:is_regular(filename:join(OutDir, "b.html")),
+ ok.
diff --git a/lib/edoc/test/edoc_SUITE_data/myapp/doc/.dummy b/lib/edoc/test/edoc_SUITE_data/myapp/doc/.dummy
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/edoc/test/edoc_SUITE_data/myapp/doc/.dummy
diff --git a/lib/edoc/test/edoc_SUITE_data/myapp/src/a.erl b/lib/edoc/test/edoc_SUITE_data/myapp/src/a.erl
new file mode 100644
index 0000000000..1b5b704551
--- /dev/null
+++ b/lib/edoc/test/edoc_SUITE_data/myapp/src/a.erl
@@ -0,0 +1 @@
+-module(a).
diff --git a/lib/edoc/test/edoc_SUITE_data/myapp/src/src_1/b.erl b/lib/edoc/test/edoc_SUITE_data/myapp/src/src_1/b.erl
new file mode 100644
index 0000000000..6d6f15dfe5
--- /dev/null
+++ b/lib/edoc/test/edoc_SUITE_data/myapp/src/src_1/b.erl
@@ -0,0 +1 @@
+-module(b).
diff --git a/lib/eldap/src/eldap.erl b/lib/eldap/src/eldap.erl
index 977f7d2809..ae47c815c9 100644
--- a/lib/eldap/src/eldap.erl
+++ b/lib/eldap/src/eldap.erl
@@ -413,7 +413,7 @@ parse_args([{port, Port}|T], Cpid, Data) when is_integer(Port) ->
parse_args([{timeout, Timeout}|T], Cpid, Data) when is_integer(Timeout),Timeout>0 ->
parse_args(T, Cpid, Data#eldap{timeout = Timeout});
parse_args([{anon_auth, true}|T], Cpid, Data) ->
- parse_args(T, Cpid, Data#eldap{anon_auth = false});
+ parse_args(T, Cpid, Data#eldap{anon_auth = true});
parse_args([{anon_auth, _}|T], Cpid, Data) ->
parse_args(T, Cpid, Data);
parse_args([{ssl, true}|T], Cpid, Data) ->
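The clause fixed above handles the `anon_auth' option of an LDAP connection; a
minimal sketch of passing it through the usual eldap:open/2 entry point (host
name and port are placeholders):

    {ok, Handle} = eldap:open(["ldap.example.com"],
                              [{port, 389},
                               {timeout, 5000},
                               {anon_auth, true}]).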
diff --git a/lib/erl_docgen/priv/bin/specs_gen.escript b/lib/erl_docgen/priv/bin/specs_gen.escript
index 156311565c..e8a8f14e3a 100644
--- a/lib/erl_docgen/priv/bin/specs_gen.escript
+++ b/lib/erl_docgen/priv/bin/specs_gen.escript
@@ -97,7 +97,7 @@ read_file(File, Opts) ->
edoc:read_source(File, Opts).
extract(File, Forms, Opts) ->
- Env = edoc_lib:get_doc_env([], [], [], _Opts=[]),
+ Env = edoc_lib:get_doc_env([], [], _Opts=[]),
{_Module, Doc} = edoc_extract:source(Forms, File, Env, Opts),
Doc.
diff --git a/lib/erl_docgen/priv/bin/xml_from_edoc.escript b/lib/erl_docgen/priv/bin/xml_from_edoc.escript
index 65a580dca2..007546e7ba 100755
--- a/lib/erl_docgen/priv/bin/xml_from_edoc.escript
+++ b/lib/erl_docgen/priv/bin/xml_from_edoc.escript
@@ -117,7 +117,7 @@ users_guide(File, Args) ->
Text = edoc_lib:run_layout(F, Opts),
OutFile = "chapter" ++ Args#args.suffix,
- edoc_lib:write_file(Text, ".", OutFile, '', Encoding);
+ edoc_lib:write_file(Text, ".", OutFile, Encoding);
false ->
io:format("~s: not a regular file\n", [File]),
usage()
diff --git a/lib/hipe/cerl/erl_bif_types.erl b/lib/hipe/cerl/erl_bif_types.erl
index 74e93bf098..5b1401b34a 100644
--- a/lib/hipe/cerl/erl_bif_types.erl
+++ b/lib/hipe/cerl/erl_bif_types.erl
@@ -1070,9 +1070,6 @@ type(hipe_bifs, find_na_or_make_stub, 2, Xs, Opaques) ->
type(hipe_bifs, fun_to_address, 1, Xs, Opaques) ->
strict(hipe_bifs, fun_to_address, 1, Xs,
fun (_) -> t_integer() end, Opaques);
-%% type(hipe_bifs, get_emu_address, 1, Xs, Opaques) ->
-%% strict(hipe_bifs, get_emu_address, 1, Xs,
-%% fun (_) -> t_integer() end, Opaques); % address
type(hipe_bifs, get_fe, 2, Xs, Opaques) ->
strict(hipe_bifs, get_fe, 2, Xs, fun (_) -> t_integer() end, Opaques);
type(hipe_bifs, get_rts_param, 1, Xs, Opaques) ->
@@ -1081,9 +1078,6 @@ type(hipe_bifs, get_rts_param, 1, Xs, Opaques) ->
type(hipe_bifs, invalidate_funinfo_native_addresses, 1, Xs, Opaques) ->
strict(hipe_bifs, invalidate_funinfo_native_addresses, 1, Xs,
fun (_) -> t_nil() end, Opaques);
-%% type(hipe_bifs, make_native_stub, 2, Xs, Opaques) ->
-%% strict(hipe_bifs, make_native_stub, 2, Xs,
-%% fun (_) -> t_integer() end, Opaques); % address
type(hipe_bifs, mark_referred_from, 1, Xs, Opaques) ->
strict(hipe_bifs, mark_referred_from, 1, Xs,
fun (_) -> t_nil() end, Opaques);
@@ -2462,16 +2456,12 @@ arg_types(hipe_bifs, find_na_or_make_stub, 2) ->
[t_mfa(), t_boolean()];
arg_types(hipe_bifs, fun_to_address, 1) ->
[t_mfa()];
-%% arg_types(hipe_bifs, get_emu_address, 1) ->
-%% [t_mfa()];
arg_types(hipe_bifs, get_fe, 2) ->
[t_atom(), t_tuple([t_integer(), t_integer(), t_integer()])];
arg_types(hipe_bifs, get_rts_param, 1) ->
[t_fixnum()];
arg_types(hipe_bifs, invalidate_funinfo_native_addresses, 1) ->
[t_list(t_mfa())];
-%% arg_types(hipe_bifs, make_native_stub, 2) ->
-%% [t_integer(), t_arity()];
arg_types(hipe_bifs, mark_referred_from, 1) ->
[t_mfa()];
arg_types(hipe_bifs, merge_term, 1) ->
diff --git a/lib/hipe/cerl/erl_types.erl b/lib/hipe/cerl/erl_types.erl
index 5124e7238a..09dffe1280 100644
--- a/lib/hipe/cerl/erl_types.erl
+++ b/lib/hipe/cerl/erl_types.erl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2003-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2003-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -78,10 +78,11 @@
t_non_neg_fixnum/0,
t_pos_fixnum/0,
t_float/0,
+ t_var_names/1,
t_form_to_string/1,
- t_from_form/1,
- t_from_form/2,
- t_from_form/3,
+ t_from_form/4,
+ t_from_form/5,
+ t_from_form_without_remote/2,
t_from_range/2,
t_from_range_unsafe/2,
t_from_term/1,
@@ -181,7 +182,6 @@
t_remote/3,
t_string/0,
t_struct_from_opaque/2,
- t_solve_remote/3,
t_subst/2,
t_subtract/2,
t_subtract_list/2,
@@ -248,6 +248,8 @@
%%
-define(REC_TYPE_LIMIT, 2).
+-define(EXPAND_DEPTH, 16).
+-define(EXPAND_LIMIT, 10000).
-define(TUPLE_TAG_LIMIT, 5).
-define(TUPLE_ARITY_LIMIT, 8).
@@ -366,7 +368,7 @@
-type record_key() :: {'record', atom()}.
-type type_key() :: {'type' | 'opaque', atom(), arity()}.
--type record_value() :: orddict:orddict(). % XXX. To be refined
+-type record_value() :: [{atom(), erl_parse:abstract_expr(), erl_type()}].
-type type_value() :: {module(), erl_type(), atom()}.
-type type_table() :: dict:dict(record_key(), record_value())
| dict:dict(type_key(), type_value()).
@@ -747,7 +749,7 @@ t_opaque_from_records(RecDict) ->
end
end, RecDict),
OpaqueTypeDict =
- dict:map(fun({opaque, Name, _Arity}, {Module, _Type, ArgNames}) ->
+ dict:map(fun({opaque, Name, _Arity}, {{Module, _Form, ArgNames}, _Type}) ->
%% Args = args_to_types(ArgNames),
%% List = lists:zip(ArgNames, Args),
%% TmpVarDict = dict:from_list(List),
@@ -808,134 +810,6 @@ is_remote(_) -> false.
-type mod_records() :: dict:dict(module(), type_table()).
--spec t_solve_remote(erl_type(), sets:set(mfa()), mod_records()) -> erl_type().
-
-t_solve_remote(Type, ExpTypes, Records) ->
- {RT, _RR} = t_solve_remote(Type, ExpTypes, Records, []),
- RT.
-
-t_solve_remote(?function(Domain, Range), ET, R, C) ->
- {RT1, RR1} = t_solve_remote(Domain, ET, R, C),
- {RT2, RR2} = t_solve_remote(Range, ET, R, C),
- {?function(RT1, RT2), RR1 ++ RR2};
-t_solve_remote(?list(Types, Term, Size), ET, R, C) ->
- {RT1, RR1} = t_solve_remote(Types, ET, R, C),
- {RT2, RR2} = t_solve_remote(Term, ET, R, C),
- {?list(RT1, RT2, Size), RR1 ++ RR2};
-t_solve_remote(?product(Types), ET, R, C) ->
- {RL, RR} = list_solve_remote(Types, ET, R, C),
- {?product(RL), RR};
-t_solve_remote(?opaque(Set), ET, R, C) ->
- List = ordsets:to_list(Set),
- {NewList, RR} = opaques_solve_remote(List, ET, R, C),
- {?opaque(ordsets:from_list(NewList)), RR};
-t_solve_remote(?tuple(?any, _, _) = T, _ET, _R, _C) -> {T, []};
-t_solve_remote(?tuple(Types, _Arity, _Tag), ET, R, C) ->
- {RL, RR} = list_solve_remote(Types, ET, R, C),
- {t_tuple(RL), RR};
-t_solve_remote(?tuple_set(Set), ET, R, C) ->
- {NewTuples, RR} = tuples_solve_remote(Set, ET, R, C),
- {t_sup(NewTuples), RR};
-t_solve_remote(?remote(Set), ET, R, C) ->
- RemoteList = ordsets:to_list(Set),
- {RL, RR} = list_solve_remote_type(RemoteList, ET, R, C),
- {t_sup(RL), RR};
-t_solve_remote(?union(List), ET, R, C) ->
- {RL, RR} = list_solve_remote(List, ET, R, C),
- {t_sup(RL), RR};
-t_solve_remote(T, _ET, _R, _C) -> {T, []}.
-
-t_solve_remote_type(#remote{mod = RemMod, name = Name, args = Args0} = RemType,
- ET, R, C) ->
- Args = lists:map(fun(A) ->
- {Arg, _} = t_solve_remote(A, ET, R, C),
- Arg
- end, Args0),
- ArgsLen = length(Args),
- case dict:find(RemMod, R) of
- error ->
- self() ! {self(), ext_types, {RemMod, Name, ArgsLen}},
- {t_any(), []};
- {ok, RemDict} ->
- MFA = {RemMod, Name, ArgsLen},
- case sets:is_element(MFA, ET) of
- true ->
- case lookup_type(Name, ArgsLen, RemDict) of
- {type, {_Mod, Type, ArgNames}} ->
- {NewType, NewCycle, NewRR} =
- case can_unfold_more(RemType, C) of
- true ->
- List = lists:zip(ArgNames, Args),
- TmpVarDict = dict:from_list(List),
- {t_from_form(Type, RemDict, TmpVarDict), [RemType|C], []};
- false ->
- {t_any(), C, [RemType]}
- end,
- {RT, RR} = t_solve_remote(NewType, ET, R, NewCycle),
- RetRR = NewRR ++ RR,
- RT1 =
- case lists:member(RemType, RetRR) of
- true -> t_limit(RT, ?REC_TYPE_LIMIT);
- false -> RT
- end,
- {RT1, RetRR};
- {opaque, {Mod, Type, ArgNames}} ->
- List = lists:zip(ArgNames, Args),
- TmpVarDict = dict:from_list(List),
- {Rep, NewCycle, NewRR} =
- case can_unfold_more(RemType, C) of
- true ->
- {t_from_form(Type, RemDict, TmpVarDict), [RemType|C], []};
- false ->
- {t_any(), C, [RemType]}
- end,
- {NewRep, RR} = t_solve_remote(Rep, ET, R, NewCycle),
- RetRR = NewRR ++ RR,
- RT1 =
- case lists:member(RemType, RetRR) of
- true -> t_limit(NewRep, ?REC_TYPE_LIMIT);
- false -> NewRep
- end,
- {skip_opaque_alias(RT1, Mod, Name, Args), RetRR};
- error ->
- Msg = io_lib:format("Unable to find remote type ~w:~w()\n",
- [RemMod, Name]),
- throw({error, Msg})
- end;
- false ->
- self() ! {self(), ext_types, {RemMod, Name, ArgsLen}},
- {t_any(), []}
- end
- end.
-
-list_solve_remote([], _ET, _R, _C) ->
- {[], []};
-list_solve_remote([Type|Types], ET, R, C) ->
- {RT, RR1} = t_solve_remote(Type, ET, R, C),
- {RL, RR2} = list_solve_remote(Types, ET, R, C),
- {[RT|RL], RR1 ++ RR2}.
-
-list_solve_remote_type([], _ET, _R, _C) ->
- {[], []};
-list_solve_remote_type([Type|Types], ET, R, C) ->
- {RT, RR1} = t_solve_remote_type(Type, ET, R, C),
- {RL, RR2} = list_solve_remote_type(Types, ET, R, C),
- {[RT|RL], RR1 ++ RR2}.
-
-opaques_solve_remote([], _ET, _R, _C) ->
- {[], []};
-opaques_solve_remote([#opaque{struct = Struct} = Remote|Tail], ET, R, C) ->
- {RT, RR1} = t_solve_remote(Struct, ET, R, C),
- {LOp, RR2} = opaques_solve_remote(Tail, ET, R, C),
- {[Remote#opaque{struct = RT}|LOp], RR1 ++ RR2}.
-
-tuples_solve_remote([], _ET, _R, _C) ->
- {[], []};
-tuples_solve_remote([{_Sz, Tuples}|Tail], ET, R, C) ->
- {RL, RR1} = list_solve_remote(Tuples, ET, R, C),
- {LSzTpls, RR2} = tuples_solve_remote(Tail, ET, R, C),
- {RL ++ LSzTpls, RR1 ++ RR2}.
-
%%-----------------------------------------------------------------------------
%% Unit type. Signals non termination.
%%
@@ -2264,14 +2138,19 @@ expand_range_from_set(Range = ?int_range(From, To), Set) ->
-spec t_sup([erl_type()]) -> erl_type().
-t_sup([?any|_]) ->
- ?any;
-t_sup([H1, H2|T]) ->
- t_sup([t_sup(H1, H2)|T]);
-t_sup([H]) ->
- subst_all_vars_to_any(H);
-t_sup([]) ->
- ?none.
+t_sup([]) -> ?none;
+t_sup(Ts) ->
+ case lists:any(fun is_any/1, Ts) of
+ true -> ?any;
+ false ->
+ t_sup1(Ts, [])
+ end.
+
+t_sup1([H1, H2|T], L) ->
+ t_sup1(T, [t_sup(H1, H2)|L]);
+t_sup1([T], []) -> subst_all_vars_to_any(T);
+t_sup1(Ts, L) ->
+ t_sup1(Ts++L, []).
-spec t_sup(erl_type(), erl_type()) -> erl_type().
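An illustrative sketch of the rewritten t_sup/1 above, not taken from the
patch: the new code scans the whole list for any() up front and otherwise
reduces the elements pairwise in rounds instead of folding them left to right.
The calls below assume the exported erl_types API:

    %% Any any() member makes the whole supremum any().
    true = erl_types:t_is_any(
             erl_types:t_sup([erl_types:t_integer(),
                              erl_types:t_any(),
                              erl_types:t_atom()])),
    %% Otherwise the elements are combined pairwise into one union.
    AorB = erl_types:t_sup([erl_types:t_atom(a), erl_types:t_atom(b)]).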
@@ -3089,12 +2968,12 @@ t_subst_aux(T, _VarMap) ->
subst_all_remote(Type0, Substitute) ->
Map =
fun(Type) ->
- case erl_types:t_is_remote(Type) of
+ case t_is_remote(Type) of
true -> Substitute;
false -> Type
end
end,
- erl_types:t_map(Map, Type0).
+ t_map(Map, Type0).
%%-----------------------------------------------------------------------------
%% Unification
@@ -3776,7 +3655,7 @@ t_abstract_records(?tuple(Elements, Arity, ?atom(_) = Tag), RecDict) ->
[TagAtom] = atom_vals(Tag),
case lookup_record(TagAtom, Arity - 1, RecDict) of
error -> t_tuple([t_abstract_records(E, RecDict) || E <- Elements]);
- {ok, Fields} -> t_tuple([Tag|[T || {_Name, T} <- Fields]])
+ {ok, Fields} -> t_tuple([Tag|[T || {_Name, _Abstr, T} <- Fields]])
end;
t_abstract_records(?tuple(Elements, _Arity, _Tag), RecDict) ->
t_tuple([t_abstract_records(E, RecDict) || E <- Elements]);
@@ -3997,7 +3876,8 @@ record_to_string(Tag, [_|Fields], FieldNames, RecDict) ->
FieldStrings = record_fields_to_string(Fields, FieldNames, RecDict, []),
"#" ++ atom_to_string(Tag) ++ "{" ++ string:join(FieldStrings, ",") ++ "}".
-record_fields_to_string([F|Fs], [{FName, _DefType}|FDefs], RecDict, Acc) ->
+record_fields_to_string([F|Fs], [{FName, _Abstr, _DefType}|FDefs],
+ RecDict, Acc) ->
NewAcc =
case t_is_equal(F, t_any()) orelse t_is_any_atom('undefined', F) of
true -> Acc;
@@ -4023,7 +3903,7 @@ record_field_diffs_to_string(?tuple([_|Fs], Arity, Tag), RecDict) ->
FieldDiffs = field_diffs(Fs, FieldNames, RecDict, []),
string:join(FieldDiffs, " and ").
-field_diffs([F|Fs], [{FName, DefType}|FDefs], RecDict, Acc) ->
+field_diffs([F|Fs], [{FName, _Abstr, DefType}|FDefs], RecDict, Acc) ->
%% Don't care about opaqueness for now.
NewAcc =
case not t_is_none(t_inf(F, DefType)) of
@@ -4071,357 +3951,476 @@ mod_name(Mod, Name) ->
%%
%%=============================================================================
--spec t_from_form(parse_form()) -> erl_type().
+-type type_names() :: [type_key() | record_key()].
-t_from_form(Form) ->
- t_from_form(Form, dict:new()).
+-spec t_from_form(parse_form(), sets:set(mfa()),
+ module(), mod_records()) -> erl_type().
--spec t_from_form(parse_form(), type_table()) -> erl_type().
+t_from_form(Form, ExpTypes, Module, RecDict) ->
+ t_from_form(Form, ExpTypes, Module, RecDict, dict:new()).
-t_from_form(Form, RecDict) ->
- t_from_form(Form, RecDict, dict:new()).
+-spec t_from_form(parse_form(), sets:set(mfa()),
+ module(), mod_records(), var_table()) -> erl_type().
--spec t_from_form(parse_form(), type_table(), var_table()) -> erl_type().
+t_from_form(Form, ExpTypes, Module, RecDict, VarDict) ->
+ {T, _} = t_from_form1(Form, [], ExpTypes, Module, RecDict, VarDict),
+ T.
-t_from_form(Form, RecDict, VarDict) ->
- {T, _R} = t_from_form(Form, [], RecDict, VarDict),
+%% Replace external types with none().
+-spec t_from_form_without_remote(parse_form(), type_table()) -> erl_type().
+
+t_from_form_without_remote(Form, TypeTable) ->
+ Module = mod,
+ RecDict = dict:from_list([{Module, TypeTable}]),
+ ExpTypes = replace_by_none,
+ {T, _} = t_from_form1(Form, [], ExpTypes, Module, RecDict, dict:new()),
T.
--type type_names() :: [type_key() | record_key()].
+%% REC_TYPE_LIMIT is used for limiting the depth of recursive types.
+%% EXPAND_LIMIT is used for limiting the size of types by
+%% limiting the number of elements of lists within one type form.
+%% EXPAND_DEPTH is used in conjunction with EXPAND_LIMIT to make the
+%% types balanced (unions will otherwise collapse to any()) by limiting
+%% the depth the same way as t_limit/2 does.
+
+-type expand_limit() :: integer().
--spec t_from_form(parse_form(), type_names(), type_table(), var_table()) ->
- {erl_type(), type_names()}.
+-type expand_depth() :: integer().
-t_from_form({var, _L, '_'}, _TypeNames, _RecDict, _VarDict) ->
- {t_any(), []};
-t_from_form({var, _L, Name}, _TypeNames, _RecDict, VarDict) ->
- case dict:find(Name, VarDict) of
- error -> {t_var(Name), []};
- {ok, Val} -> {Val, []}
+t_from_form1(Form, TypeNames, ET, M, MR, V) ->
+ t_from_form1(Form, TypeNames, ET, M, MR, V, ?EXPAND_DEPTH).
+
+t_from_form1(Form, TypeNames, ET, M, MR, V, D) ->
+ L = ?EXPAND_LIMIT,
+ {T, L1} = t_from_form(Form, TypeNames, ET, M, MR, V, D, L),
+ if
+ L1 =< 0, D > 1 ->
+ D1 = D div 2,
+ t_from_form1(Form, TypeNames, ET, M, MR, V, D1);
+ true ->
+ {T, L1}
+ end.
+
+-spec t_from_form(parse_form(), type_names(),
+ sets:set(mfa()) | 'replace_by_none',
+ module(), mod_records(), var_table(),
+ expand_depth(), expand_limit())
+ -> {erl_type(), expand_limit()}.
+
+%% If there is something wrong with parse_form()
+%% throw({error, io_lib:chars()} is called;
+%% for unknown remote types
+%% self() ! {self(), ext_types, {RemMod, Name, ArgsLen}}
+%% is called, unless 'replace_by_none' is given.
+%%
+%% It is assumed that M can be found in MR.
+
+t_from_form(_, _TypeNames, _ET, _M, _MR, _V, D, L) when D =< 0 ; L =< 0 ->
+ {t_any(), L};
+t_from_form({var, _L, '_'}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_any(), L};
+t_from_form({var, _L, Name}, _TypeNames, _ET, _M, _MR, V, _D, L) ->
+ case dict:find(Name, V) of
+ error -> {t_var(Name), L};
+ {ok, Val} -> {Val, L}
end;
-t_from_form({ann_type, _L, [_Var, Type]}, TypeNames, RecDict, VarDict) ->
- t_from_form(Type, TypeNames, RecDict, VarDict);
-t_from_form({paren_type, _L, [Type]}, TypeNames, RecDict, VarDict) ->
- t_from_form(Type, TypeNames, RecDict, VarDict);
+t_from_form({ann_type, _L, [_Var, Type]}, TypeNames, ET, M, MR, V, D, L) ->
+ t_from_form(Type, TypeNames, ET, M, MR, V, D, L);
+t_from_form({paren_type, _L, [Type]}, TypeNames, ET, M, MR, V, D, L) ->
+ t_from_form(Type, TypeNames, ET, M, MR, V, D, L);
t_from_form({remote_type, _L, [{atom, _, Module}, {atom, _, Type}, Args]},
- TypeNames, RecDict, VarDict) ->
- {L, R} = list_from_form(Args, TypeNames, RecDict, VarDict),
- {t_remote(Module, Type, L), R};
-t_from_form({atom, _L, Atom}, _TypeNames, _RecDict, _VarDict) ->
- {t_atom(Atom), []};
-t_from_form({integer, _L, Int}, _TypeNames, _RecDict, _VarDict) ->
- {t_integer(Int), []};
-t_from_form({op, _L, _Op, _Arg} = Op, _TypeNames, _RecDict, _VarDict) ->
+ TypeNames, ET, M, MR, V, D, L) ->
+ remote_from_form(Module, Type, Args, TypeNames, ET, M, MR, V, D, L);
+t_from_form({atom, _L, Atom}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_atom(Atom), L};
+t_from_form({integer, _L, Int}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_integer(Int), L};
+t_from_form({op, _L, _Op, _Arg} = Op, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
case erl_eval:partial_eval(Op) of
{integer, _, Val} ->
- {t_integer(Val), []};
+ {t_integer(Val), L};
_ -> throw({error, io_lib:format("Unable to evaluate type ~w\n", [Op])})
end;
t_from_form({op, _L, _Op, _Arg1, _Arg2} = Op, _TypeNames,
- _RecDict, _VarDict) ->
+ _ET, _M, _MR, _V, _D, L) ->
case erl_eval:partial_eval(Op) of
{integer, _, Val} ->
- {t_integer(Val), []};
+ {t_integer(Val), L};
_ -> throw({error, io_lib:format("Unable to evaluate type ~w\n", [Op])})
end;
-t_from_form({type, _L, any, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_any(), []};
-t_from_form({type, _L, arity, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_arity(), []};
-t_from_form({type, _L, atom, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_atom(), []};
-t_from_form({type, _L, binary, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_binary(), []};
+t_from_form({type, _L, any, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_any(), L};
+t_from_form({type, _L, arity, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_arity(), L};
+t_from_form({type, _L, atom, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_atom(), L};
+t_from_form({type, _L, binary, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_binary(), L};
t_from_form({type, _L, binary, [Base, Unit]} = Type,
- _TypeNames, _RecDict, _VarDict) ->
+ _TypeNames, _ET, _M, _MR, _V, _D, L) ->
case {erl_eval:partial_eval(Base), erl_eval:partial_eval(Unit)} of
{{integer, _, B}, {integer, _, U}} when B >= 0, U >= 0 ->
- {t_bitstr(U, B), []};
+ {t_bitstr(U, B), L};
_ -> throw({error, io_lib:format("Unable to evaluate type ~w\n", [Type])})
end;
-t_from_form({type, _L, bitstring, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_bitstr(), []};
-t_from_form({type, _L, bool, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_boolean(), []}; % XXX: Temporarily
-t_from_form({type, _L, boolean, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_boolean(), []};
-t_from_form({type, _L, byte, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_byte(), []};
-t_from_form({type, _L, char, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_char(), []};
-t_from_form({type, _L, float, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_float(), []};
-t_from_form({type, _L, function, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_fun(), []};
-t_from_form({type, _L, 'fun', []}, _TypeNames, _RecDict, _VarDict) ->
- {t_fun(), []};
+t_from_form({type, _L, bitstring, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_bitstr(), L};
+t_from_form({type, _L, bool, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_boolean(), L}; % XXX: Temporarily
+t_from_form({type, _L, boolean, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_boolean(), L};
+t_from_form({type, _L, byte, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_byte(), L};
+t_from_form({type, _L, char, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_char(), L};
+t_from_form({type, _L, float, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_float(), L};
+t_from_form({type, _L, function, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_fun(), L};
+t_from_form({type, _L, 'fun', []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_fun(), L};
t_from_form({type, _L, 'fun', [{type, _, any}, Range]}, TypeNames,
- RecDict, VarDict) ->
- {T, R} = t_from_form(Range, TypeNames, RecDict, VarDict),
- {t_fun(T), R};
+ ET, M, MR, V, D, L) ->
+ {T, L1} = t_from_form(Range, TypeNames, ET, M, MR, V, D - 1, L - 1),
+ {t_fun(T), L1};
t_from_form({type, _L, 'fun', [{type, _, product, Domain}, Range]},
- TypeNames, RecDict, VarDict) ->
- {L, R1} = list_from_form(Domain, TypeNames, RecDict, VarDict),
- {T, R2} = t_from_form(Range, TypeNames, RecDict, VarDict),
- {t_fun(L, T), R1 ++ R2};
-t_from_form({type, _L, identifier, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_identifier(), []};
-t_from_form({type, _L, integer, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_integer(), []};
-t_from_form({type, _L, iodata, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_iodata(), []};
-t_from_form({type, _L, iolist, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_iolist(), []};
-t_from_form({type, _L, list, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_list(), []};
-t_from_form({type, _L, list, [Type]}, TypeNames, RecDict, VarDict) ->
- {T, R} = t_from_form(Type, TypeNames, RecDict, VarDict),
- {t_list(T), R};
-t_from_form({type, _L, map, _}, TypeNames, RecDict, VarDict) ->
- builtin_type(map, t_map([]), TypeNames, RecDict, VarDict);
-t_from_form({type, _L, mfa, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_mfa(), []};
-t_from_form({type, _L, module, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_module(), []};
-t_from_form({type, _L, nil, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_nil(), []};
-t_from_form({type, _L, neg_integer, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_neg_integer(), []};
-t_from_form({type, _L, non_neg_integer, []}, _TypeNames, _RecDict,
- _VarDict) ->
- {t_non_neg_integer(), []};
-t_from_form({type, _L, no_return, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_unit(), []};
-t_from_form({type, _L, node, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_node(), []};
-t_from_form({type, _L, none, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_none(), []};
-t_from_form({type, _L, nonempty_list, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_nonempty_list(), []};
-t_from_form({type, _L, nonempty_list, [Type]}, TypeNames, RecDict, VarDict) ->
- {T, R} = t_from_form(Type, TypeNames, RecDict, VarDict),
- {t_nonempty_list(T), R};
+ TypeNames, ET, M, MR, V, D, L) ->
+ {Dom1, L1} = list_from_form(Domain, TypeNames, ET, M, MR, V, D, L),
+ {Ran1, L2} = t_from_form(Range, TypeNames, ET, M, MR, V, D - 1, L1),
+ {t_fun(Dom1, Ran1), L2};
+t_from_form({type, _L, identifier, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_identifier(), L};
+t_from_form({type, _L, integer, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_integer(), L};
+t_from_form({type, _L, iodata, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_iodata(), L};
+t_from_form({type, _L, iolist, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_iolist(), L};
+t_from_form({type, _L, list, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_list(), L};
+t_from_form({type, _L, list, [Type]}, TypeNames, ET, M, MR, V, D, L) ->
+ {T, L1} = t_from_form(Type, TypeNames, ET, M, MR, V, D - 1, L - 1),
+ {t_list(T), L1};
+t_from_form({type, _L, map, _}, TypeNames, ET, M, MR, V, D, L) ->
+ builtin_type(map, t_map([]), TypeNames, ET, M, MR, V, D, L);
+t_from_form({type, _L, mfa, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_mfa(), L};
+t_from_form({type, _L, module, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_module(), L};
+t_from_form({type, _L, nil, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_nil(), L};
+t_from_form({type, _L, neg_integer, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_neg_integer(), L};
+t_from_form({type, _L, non_neg_integer, []}, _TypeNames, _ET, _M, _MR,
+ _V, _D, L) ->
+ {t_non_neg_integer(), L};
+t_from_form({type, _L, no_return, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_unit(), L};
+t_from_form({type, _L, node, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_node(), L};
+t_from_form({type, _L, none, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_none(), L};
+t_from_form({type, _L, nonempty_list, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_nonempty_list(), L};
+t_from_form({type, _L, nonempty_list, [Type]}, TypeNames, ET, M, MR, V, D, L) ->
+ {T, L1} = t_from_form(Type, TypeNames, ET, M, MR, V, D, L - 1),
+ {t_nonempty_list(T), L1};
t_from_form({type, _L, nonempty_improper_list, [Cont, Term]}, TypeNames,
- RecDict, VarDict) ->
- {T1, R1} = t_from_form(Cont, TypeNames, RecDict, VarDict),
- {T2, R2} = t_from_form(Term, TypeNames, RecDict, VarDict),
- {t_cons(T1, T2), R1 ++ R2};
+ ET, M, MR, V, D, L) ->
+ {T1, L1} = t_from_form(Cont, TypeNames, ET, M, MR, V, D, L - 1),
+ {T2, L2} = t_from_form(Term, TypeNames, ET, M, MR, V, D, L1),
+ {t_cons(T1, T2), L2};
t_from_form({type, _L, nonempty_maybe_improper_list, []}, _TypeNames,
- _RecDict, _VarDict) ->
- {t_cons(?any, ?any), []};
+ _ET, _M, _MR, _V, _D, L) ->
+ {t_cons(?any, ?any), L};
t_from_form({type, _L, nonempty_maybe_improper_list, [Cont, Term]},
- TypeNames, RecDict, VarDict) ->
- {T1, R1} = t_from_form(Cont, TypeNames, RecDict, VarDict),
- {T2, R2} = t_from_form(Term, TypeNames, RecDict, VarDict),
- {t_cons(T1, T2), R1 ++ R2};
-t_from_form({type, _L, nonempty_string, []}, _TypeNames, _RecDict,
- _VarDict) ->
- {t_nonempty_string(), []};
-t_from_form({type, _L, number, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_number(), []};
-t_from_form({type, _L, pid, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_pid(), []};
-t_from_form({type, _L, port, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_port(), []};
-t_from_form({type, _L, pos_integer, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_pos_integer(), []};
+ TypeNames, ET, M, MR, V, D, L) ->
+ {T1, L1} = t_from_form(Cont, TypeNames, ET, M, MR, V, D, L - 1),
+ {T2, L2} = t_from_form(Term, TypeNames, ET, M, MR, V, D, L1),
+ {t_cons(T1, T2), L2};
+t_from_form({type, _L, nonempty_string, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_nonempty_string(), L};
+t_from_form({type, _L, number, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_number(), L};
+t_from_form({type, _L, pid, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_pid(), L};
+t_from_form({type, _L, port, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_port(), L};
+t_from_form({type, _L, pos_integer, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_pos_integer(), L};
t_from_form({type, _L, maybe_improper_list, []}, _TypeNames,
- _RecDict, _VarDict) ->
- {t_maybe_improper_list(), []};
+ _ET, _M, _MR, _V, _D, L) ->
+ {t_maybe_improper_list(), L};
t_from_form({type, _L, maybe_improper_list, [Content, Termination]},
- TypeNames, RecDict, VarDict) ->
- {T1, R1} = t_from_form(Content, TypeNames, RecDict, VarDict),
- {T2, R2} = t_from_form(Termination, TypeNames, RecDict, VarDict),
- {t_maybe_improper_list(T1, T2), R1 ++ R2};
-t_from_form({type, _L, product, Elements}, TypeNames, RecDict, VarDict) ->
- {L, R} = list_from_form(Elements, TypeNames, RecDict, VarDict),
- {t_product(L), R};
+ TypeNames, ET, M, MR, V, D, L) ->
+ {T1, L1} = t_from_form(Content, TypeNames, ET, M, MR, V, D, L - 1),
+ {T2, L2} = t_from_form(Termination, TypeNames, ET, M, MR, V, D, L1),
+ {t_maybe_improper_list(T1, T2), L2};
+t_from_form({type, _L, product, Elements}, TypeNames, ET, M, MR, V, D, L) ->
+ {Lst, L1} = list_from_form(Elements, TypeNames, ET, M, MR, V, D - 1, L),
+ {t_product(Lst), L1};
t_from_form({type, _L, range, [From, To]} = Type,
- _TypeNames, _RecDict, _VarDict) ->
+ _TypeNames, _ET, _M, _MR, _V, _D, L) ->
case {erl_eval:partial_eval(From), erl_eval:partial_eval(To)} of
{{integer, _, FromVal}, {integer, _, ToVal}} ->
- {t_from_range(FromVal, ToVal), []};
+ {t_from_range(FromVal, ToVal), L};
_ -> throw({error, io_lib:format("Unable to evaluate type ~w\n", [Type])})
end;
-t_from_form({type, _L, record, [Name|Fields]}, TypeNames, RecDict, VarDict) ->
- record_from_form(Name, Fields, TypeNames, RecDict, VarDict);
-t_from_form({type, _L, reference, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_reference(), []};
-t_from_form({type, _L, string, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_string(), []};
-t_from_form({type, _L, term, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_any(), []};
-t_from_form({type, _L, timeout, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_timeout(), []};
-t_from_form({type, _L, tuple, any}, _TypeNames, _RecDict, _VarDict) ->
- {t_tuple(), []};
-t_from_form({type, _L, tuple, Args}, TypeNames, RecDict, VarDict) ->
- {L, R} = list_from_form(Args, TypeNames, RecDict, VarDict),
- {t_tuple(L), R};
-t_from_form({type, _L, union, Args}, TypeNames, RecDict, VarDict) ->
- {L, R} = list_from_form(Args, TypeNames, RecDict, VarDict),
- {t_sup(L), R};
-t_from_form({user_type, _L, Name, Args}, TypeNames, RecDict, VarDict) ->
- type_from_form(Name, Args, TypeNames, RecDict, VarDict);
-t_from_form({type, _L, Name, Args}, TypeNames, RecDict, VarDict) ->
+t_from_form({type, _L, record, [Name|Fields]}, TypeNames, ET, M, MR, V, D, L) ->
+ record_from_form(Name, Fields, TypeNames, ET, M, MR, V, D, L);
+t_from_form({type, _L, reference, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_reference(), L};
+t_from_form({type, _L, string, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_string(), L};
+t_from_form({type, _L, term, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_any(), L};
+t_from_form({type, _L, timeout, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_timeout(), L};
+t_from_form({type, _L, tuple, any}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_tuple(), L};
+t_from_form({type, _L, tuple, Args}, TypeNames, ET, M, MR, V, D, L) ->
+ {Lst, L1} = list_from_form(Args, TypeNames, ET, M, MR, V, D - 1, L),
+ {t_tuple(Lst), L1};
+t_from_form({type, _L, union, Args}, TypeNames, ET, M, MR, V, D, L) ->
+ {Lst, L1} = list_from_form(Args, TypeNames, ET, M, MR, V, D, L),
+ {t_sup(Lst), L1};
+t_from_form({user_type, _L, Name, Args}, TypeNames, ET, M, MR, V, D, L) ->
+ type_from_form(Name, Args, TypeNames, ET, M, MR, V, D, L);
+t_from_form({type, _L, Name, Args}, TypeNames, ET, M, MR, V, D, L) ->
%% Compatibility: modules compiled before Erlang/OTP 18.0.
- type_from_form(Name, Args, TypeNames, RecDict, VarDict);
+ type_from_form(Name, Args, TypeNames, ET, M, MR, V, D, L);
t_from_form({opaque, _L, Name, {Mod, Args, Rep}}, _TypeNames,
- _RecDict, _VarDict) ->
- {t_opaque(Mod, Name, Args, Rep), []}.
-
-builtin_type(Name, Type, TypeNames, RecDict, VarDict) ->
- case lookup_type(Name, 0, RecDict) of
- {_, {_M, _T, _A}} ->
- type_from_form(Name, [], TypeNames, RecDict, VarDict);
+ _ET, _M, _MR, _V, _D, L) ->
+ %% XXX. To be removed.
+ {t_opaque(Mod, Name, Args, Rep), L}.
+
+builtin_type(Name, Type, TypeNames, ET, M, MR, V, D, L) ->
+ case dict:find(M, MR) of
+ {ok, R} ->
+ case lookup_type(Name, 0, R) of
+ {_, {{_M, _F, _A}, _T}} ->
+ type_from_form(Name, [], TypeNames, ET, M, MR, V, D, L);
+ error ->
+ {Type, L}
+ end;
error ->
- {Type, []}
+ {Type, L}
end.
-type_from_form(Name, Args, TypeNames, RecDict, VarDict) ->
+type_from_form(Name, Args, TypeNames, ET, M, MR, V, D, L) ->
ArgsLen = length(Args),
- ArgTypes = forms_to_types(Args, TypeNames, RecDict, VarDict),
- case lookup_type(Name, ArgsLen, RecDict) of
- {type, {_Module, Type, ArgNames}} ->
- TypeName = {type, Name, ArgsLen},
+ {ArgTypes, L1} = list_from_form(Args, TypeNames, ET, M, MR, V, D, L),
+ {ok, R} = dict:find(M, MR),
+ case lookup_type(Name, ArgsLen, R) of
+ {type, {{Module, Form, ArgNames}, _Type}} ->
+ TypeName = {type, Module, Name, ArgsLen},
case can_unfold_more(TypeName, TypeNames) of
true ->
List = lists:zip(ArgNames, ArgTypes),
- TmpVarDict = dict:from_list(List),
- {T, R} = t_from_form(Type, [TypeName|TypeNames],
- RecDict, TmpVarDict),
- case lists:member(TypeName, R) of
- true -> {t_limit(T, ?REC_TYPE_LIMIT), R};
- false -> {T, R}
- end;
- false -> {t_any(), [TypeName]}
+ TmpV = dict:from_list(List),
+ t_from_form(Form, [TypeName|TypeNames], ET, M, MR, TmpV, D, L1);
+ false ->
+ {t_any(), L1}
end;
- {opaque, {Module, Type, ArgNames}} ->
- TypeName = {opaque, Name, ArgsLen},
- {Rep, Rret} =
+ {opaque, {{Module, Form, ArgNames}, Type}} ->
+ TypeName = {opaque, Module, Name, ArgsLen},
+ {Rep, L2} =
case can_unfold_more(TypeName, TypeNames) of
true ->
List = lists:zip(ArgNames, ArgTypes),
- TmpVarDict = dict:from_list(List),
- {T, R} = t_from_form(Type, [TypeName|TypeNames],
- RecDict, TmpVarDict),
- case lists:member(TypeName, R) of
- true -> {t_limit(T, ?REC_TYPE_LIMIT), R};
- false -> {T, R}
- end;
- false -> {t_any(), [TypeName]}
+ TmpV = dict:from_list(List),
+ t_from_form(Form, [TypeName|TypeNames], ET, M, MR, TmpV, D, L1);
+ false -> {t_any(), L1}
end,
+ Rep1 = choose_opaque_type(Rep, Type),
Args2 = [subst_all_vars_to_any(ArgType) || ArgType <- ArgTypes],
- {skip_opaque_alias(Rep, Module, Name, Args2), Rret};
+ {skip_opaque_alias(Rep1, Module, Name, Args2), L2};
error ->
Msg = io_lib:format("Unable to find type ~w/~w\n", [Name, ArgsLen]),
throw({error, Msg})
end.
-forms_to_types(Forms, TypeNames, RecDict, VarDict) ->
- {Types, _} = list_from_form(Forms, TypeNames, RecDict, VarDict),
- Types.
-
skip_opaque_alias(?opaque(_) = T, _Mod, _Name, _Args) -> T;
skip_opaque_alias(T, Module, Name, Args) ->
t_opaque(Module, Name, Args, T).
-record_from_form({atom, _, Name}, ModFields, TypeNames, RecDict, VarDict) ->
+remote_from_form(RemMod, Name, Args, TypeNames, ET, M, MR, V, D, L) ->
+ {ArgTypes, L1} = list_from_form(Args, TypeNames, ET, M, MR, V, D, L),
+ if
+ ET =:= replace_by_none ->
+ {t_none(), L1};
+ true ->
+ ArgsLen = length(Args),
+ case dict:find(RemMod, MR) of
+ error ->
+ self() ! {self(), ext_types, {RemMod, Name, ArgsLen}},
+ {t_any(), L1};
+ {ok, RemDict} ->
+ MFA = {RemMod, Name, ArgsLen},
+ case sets:is_element(MFA, ET) of
+ true ->
+ case lookup_type(Name, ArgsLen, RemDict) of
+ {type, {{_Mod, Form, ArgNames}, _Type}} ->
+ RemType = {type, RemMod, Name, ArgsLen},
+ case can_unfold_more(RemType, TypeNames) of
+ true ->
+ List = lists:zip(ArgNames, ArgTypes),
+ TmpVarDict = dict:from_list(List),
+ NewTypeNames = [RemType|TypeNames],
+ t_from_form(Form, NewTypeNames, ET,
+ RemMod, MR, TmpVarDict, D, L1);
+ false ->
+ {t_any(), L1}
+ end;
+ {opaque, {{Mod, Form, ArgNames}, Type}} ->
+ RemType = {opaque, RemMod, Name, ArgsLen},
+ List = lists:zip(ArgNames, ArgTypes),
+ TmpVarDict = dict:from_list(List),
+ {NewRep, L2} =
+ case can_unfold_more(RemType, TypeNames) of
+ true ->
+ NewTypeNames = [RemType|TypeNames],
+ t_from_form(Form, NewTypeNames, ET, RemMod, MR,
+ TmpVarDict, D, L1);
+ false ->
+ {t_any(), L1}
+ end,
+ NewRep1 = choose_opaque_type(NewRep, Type),
+ {skip_opaque_alias(NewRep1, Mod, Name, ArgTypes), L2};
+ error ->
+ Msg = io_lib:format("Unable to find remote type ~w:~w()\n",
+ [RemMod, Name]),
+ throw({error, Msg})
+ end;
+ false ->
+ self() ! {self(), ext_types, {RemMod, Name, ArgsLen}},
+ {t_any(), L1}
+ end
+ end
+ end.
+
+%% Opaque types (both local and remote) are problematic when it comes
+%% to the limits (TypeNames, D, and L). The reason is that if any() is
+%% substituted for a more specialized subtype of an opaque type, the
+%% property stated along with decorate_with_opaque() (the type has to
+%% be a subtype of the declared type) no longer holds.
+%%
+%% The less than perfect remedy: if the opaque type created from a
+%% form is not a subset of the declared type, the declared type is
+%% used instead, effectively bypassing the limits, and potentially
+%% resulting in huge types.
+choose_opaque_type(Type, DeclType) ->
+ case
+ t_is_subtype(subst_all_vars_to_any(Type),
+ subst_all_vars_to_any(DeclType))
+ of
+ true -> Type;
+ false -> DeclType
+ end.
+
+record_from_form({atom, _, Name}, ModFields, TypeNames, ET, M, MR, V, D, L) ->
case can_unfold_more({record, Name}, TypeNames) of
true ->
- case lookup_record(Name, RecDict) of
+ {ok, R} = dict:find(M, MR),
+ case lookup_record(Name, R) of
{ok, DeclFields} ->
- TypeNames1 = [{record, Name}|TypeNames],
- AreTyped = [is_erl_type(FieldType)
- || {_FieldName, FieldType} <- DeclFields],
- {DeclFields1, R1} =
- case lists:all(fun(Elem) -> Elem end, AreTyped) of
- true -> {DeclFields, []};
- false -> fields_from_form(DeclFields, TypeNames1,
- RecDict, dict:new())
- end,
- {GetModRec, R2} = get_mod_record(ModFields, DeclFields1,
- TypeNames1,
- RecDict, VarDict),
+ NewTypeNames = [{record, Name}|TypeNames],
+ {GetModRec, L1} = get_mod_record(ModFields, DeclFields,
+ NewTypeNames, ET, M, MR, V, D, L),
case GetModRec of
{error, FieldName} ->
throw({error, io_lib:format("Illegal declaration of #~w{~w}\n",
[Name, FieldName])});
{ok, NewFields} ->
- {t_tuple(
- [t_atom(Name)|[Type || {_FieldName, Type} <- NewFields]]),
- R1 ++ R2}
+ {NewFields1, L2} =
+ fields_from_form(NewFields, NewTypeNames, ET, M, MR,
+ dict:new(), D, L1),
+ Rec = t_tuple(
+ [t_atom(Name)|[Type
+ || {_FieldName, Type} <- NewFields1]]),
+ {Rec, L2}
end;
error ->
throw({error, io_lib:format("Unknown record #~w{}\n", [Name])})
end;
- false -> {t_any(), []}
+ false ->
+ {t_any(), L}
end.
-get_mod_record([], DeclFields, _TypeNames, _RecDict, _VarDict) ->
- {{ok, DeclFields}, []};
-get_mod_record(ModFields, DeclFields, TypeNames, RecDict, VarDict) ->
- DeclFieldsDict = orddict:from_list(DeclFields),
- {ModFieldsDict, R} = build_field_dict(ModFields, TypeNames,
- RecDict, VarDict),
- case get_mod_record(DeclFieldsDict, ModFieldsDict, []) of
- {error, _FieldName} = Error -> {Error, R};
- {ok, FinalOrdDict} ->
- {{ok, [{FieldName, orddict:fetch(FieldName, FinalOrdDict)}
- || {FieldName, _} <- DeclFields]},
- R}
+get_mod_record([], DeclFields, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {{ok, DeclFields}, L};
+get_mod_record(ModFields, DeclFields, TypeNames, ET, M, MR, V, D, L) ->
+ DeclFieldsDict = lists:keysort(1, DeclFields),
+ {ModFieldsDict, L1} =
+ build_field_dict(ModFields, TypeNames, ET, M, MR, V, D, L),
+ case get_mod_record_types(DeclFieldsDict, ModFieldsDict, []) of
+ {error, _FieldName} = Error -> {Error, L1};
+ {ok, FinalKeyDict} ->
+ Fields = [lists:keyfind(FieldName, 1, FinalKeyDict)
+ || {FieldName, _, _} <- DeclFields],
+ {{ok, Fields}, L1}
end.
-build_field_dict(FieldTypes, TypeNames, RecDict, VarDict) ->
- build_field_dict(FieldTypes, TypeNames, RecDict, VarDict, []).
-
-build_field_dict([{type, _, field_type, [{atom, _, Name}, Type]}|Left],
- TypeNames, RecDict, VarDict, Acc) ->
- {T, R1} = t_from_form(Type, TypeNames, RecDict, VarDict),
- NewAcc = [{Name, T}|Acc],
- {D, R2} = build_field_dict(Left, TypeNames, RecDict, VarDict, NewAcc),
- {D, R1 ++ R2};
-build_field_dict([], _TypeNames, _RecDict, _VarDict, Acc) ->
- {orddict:from_list(Acc), []}.
-
-get_mod_record([{FieldName, DeclType}|Left1],
- [{FieldName, ModType}|Left2], Acc) ->
- ModTypeNoVars = subst_all_vars_to_any(ModType),
- case
- contains_remote(ModTypeNoVars)
- orelse contains_remote(DeclType)
- orelse t_is_subtype(ModTypeNoVars, DeclType)
- of
+build_field_dict(FieldTypes, TypeNames, ET, M, MR, V, D, L) ->
+ build_field_dict(FieldTypes, TypeNames, ET, M, MR, V, D, L, []).
+
+build_field_dict([{type, _, field_type, [{atom, _, Name}, Type]}|Left],
+ TypeNames, ET, M, MR, V, D, L, Acc) ->
+ {T, L1} = t_from_form(Type, TypeNames, ET, M, MR, V, D, L - 1),
+ %% The cached record field type (DeclType) in
+ %% get_mod_record_types() was created by a call similar to the one
+ %% creating TT.
+ %% Using T for the subtype test does not work since any() is not
+ %% always a subset of the field type.
+ TT = t_from_form(Type, ET, M, MR, V),
+ NewAcc = [{Name, Type, T, TT}|Acc],
+ {Dict, L2} =
+ build_field_dict(Left, TypeNames, ET, M, MR, V, D, L1, NewAcc),
+ {Dict, L2};
+build_field_dict([], _TypeNames, _ET, _M, _MR, _V, _D, L, Acc) ->
+ {lists:keysort(1, Acc), L}.
+
+get_mod_record_types([{FieldName, _Abstr, DeclType}|Left1],
+ [{FieldName, TypeForm, ModType, ModTypeTest}|Left2],
+ Acc) ->
+ ModTypeNoVars = subst_all_vars_to_any(ModTypeTest),
+ case t_is_subtype(ModTypeNoVars, DeclType) of
false -> {error, FieldName};
- true -> get_mod_record(Left1, Left2, [{FieldName, ModType}|Acc])
+ true -> get_mod_record_types(Left1, Left2,
+ [{FieldName, TypeForm, ModType}|Acc])
end;
-get_mod_record([{FieldName1, _DeclType} = DT|Left1],
- [{FieldName2, _ModType}|_] = List2,
- Acc) when FieldName1 < FieldName2 ->
- get_mod_record(Left1, List2, [DT|Acc]);
-get_mod_record(DeclFields, [], Acc) ->
- {ok, orddict:from_list(Acc ++ DeclFields)};
-get_mod_record(_, [{FieldName2, _ModType}|_], _Acc) ->
+get_mod_record_types([{FieldName1, _Abstr, _DeclType} = DT|Left1],
+ [{FieldName2, _FormType, _ModType, _TT}|_] = List2,
+ Acc) when FieldName1 < FieldName2 ->
+ get_mod_record_types(Left1, List2, [DT|Acc]);
+get_mod_record_types(Left1, [], Acc) ->
+ {ok, lists:keysort(1, Left1++Acc)};
+get_mod_record_types(_, [{FieldName2, _FormType, _ModType, _TT}|_], _Acc) ->
{error, FieldName2}.
-contains_remote(Type) ->
- TypeNoRemote = subst_all_remote(Type, t_none()),
- not t_is_equal(Type, TypeNoRemote).
-
-fields_from_form([], _TypeNames, _RecDict, _VarDict) ->
- {[], []};
-fields_from_form([{Name, Type}|Tail], TypeNames, RecDict,
- VarDict) ->
- {T, R1} = t_from_form(Type, TypeNames, RecDict, VarDict),
- {F, R2} = fields_from_form(Tail, TypeNames, RecDict, VarDict),
- {[{Name, T}|F], R1 ++ R2}.
-
-list_from_form([], _TypeNames, _RecDict, _VarDict) ->
- {[], []};
-list_from_form([H|Tail], TypeNames, RecDict, VarDict) ->
- {T, R1} = t_from_form(H, TypeNames, RecDict, VarDict),
- {L, R2} = list_from_form(Tail, TypeNames, RecDict, VarDict),
- {[T|L], R1 ++ R2}.
+%% It is important to create a limited version of the record type
+%% since nested record types can otherwise easily result in huge
+%% terms.
+fields_from_form([], _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {[], L};
+fields_from_form([{Name, Abstr, _Type}|Tail], TypeNames, ET, M, MR,
+ V, D, L) ->
+ {T, L1} = t_from_form(Abstr, TypeNames, ET, M, MR, V, D, L),
+ {F, L2} = fields_from_form(Tail, TypeNames, ET, M, MR, V, D, L1),
+ {[{Name, T}|F], L2}.
+
+list_from_form([], _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {[], L};
+list_from_form([H|Tail], TypeNames, ET, M, MR, V, D, L) ->
+ {H1, L1} = t_from_form(H, TypeNames, ET, M, MR, V, D, L - 1),
+ {T1, L2} = list_from_form(Tail, TypeNames, ET, M, MR, V, D, L1),
+ {[H1|T1], L2}.
+
+-spec t_var_names([parse_form()]) -> [atom()].
+
+t_var_names([{var, _, Name}|L]) when L =/= '_' ->
+ [Name|t_var_names(L)];
+t_var_names([]) ->
+ [].
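+
+%% For example (illustrative):
+%%   t_var_names([{var, 1, 'X'}, {var, 2, 'Y'}]) returns ['X','Y'].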
-spec t_form_to_string(parse_form()) -> string().
@@ -4505,7 +4504,13 @@ t_form_to_string({type, _L, tuple, Args}) ->
t_form_to_string({type, _L, union, Args}) ->
string:join(t_form_to_string_list(Args), " | ");
t_form_to_string({type, _L, Name, []} = T) ->
- try t_to_string(t_from_form(T))
+ try
+ M = mod,
+ D0 = dict:new(),
+ MR = dict:from_list([{M, D0}]),
+ {T1, _} =
+ t_from_form(T, [], sets:new(), M, MR, D0, _Deep=1000, _ALot=100000),
+ t_to_string(T1)
catch throw:{error, _} -> atom_to_string(Name) ++ "()"
end;
t_form_to_string({user_type, _L, Name, List}) ->
@@ -4556,7 +4561,7 @@ is_erl_type(#c{}) -> true;
is_erl_type(_) -> false.
-spec lookup_record(atom(), type_table()) ->
- 'error' | {'ok', [{atom(), parse_form() | erl_type()}]}.
+ 'error' | {'ok', [{atom(), parse_form(), erl_type()}]}.
lookup_record(Tag, RecDict) when is_atom(Tag) ->
case dict:find({record, Tag}, RecDict) of
@@ -4571,7 +4576,7 @@ lookup_record(Tag, RecDict) when is_atom(Tag) ->
end.
-spec lookup_record(atom(), arity(), type_table()) ->
- 'error' | {'ok', [{atom(), erl_type()}]}.
+ 'error' | {'ok', [{atom(), parse_form(), erl_type()}]}.
lookup_record(Tag, Arity, RecDict) when is_atom(Tag) ->
case dict:find({record, Tag}, RecDict) of
diff --git a/lib/hipe/ppc/hipe_rtl_to_ppc.erl b/lib/hipe/ppc/hipe_rtl_to_ppc.erl
index 7dfa56df29..a55fc137c3 100644
--- a/lib/hipe/ppc/hipe_rtl_to_ppc.erl
+++ b/lib/hipe/ppc/hipe_rtl_to_ppc.erl
@@ -102,10 +102,18 @@ conv_insn(I, Map, Data) ->
end.
conv_fconv(I, Map, Data) ->
- %% Dst := (double)Src, where Dst is FP reg and Src is int reg
+ %% Dst := (double)Src, where Dst is FP reg and Src is GP reg or imm
{Dst, Map0} = conv_fpreg(hipe_rtl:fconv_dst(I), Map),
- {Src, Map1} = conv_src(hipe_rtl:fconv_src(I), Map0), % exclude imm src
- I2 = mk_fconv(Dst, Src),
+ {Src, Map1} = conv_src(hipe_rtl:fconv_src(I), Map0),
+ I2 =
+ case hipe_ppc:is_temp(Src) of
+ true ->
+ mk_fconv(Dst, Src);
+ false ->
+ Tmp = new_untagged_temp(),
+ mk_li(Tmp, Src,
+ mk_fconv(Dst, Tmp))
+ end,
{I2, Map1, Data}.
mk_fconv(Dst, Src) ->
diff --git a/lib/hipe/rtl/hipe_rtl.erl b/lib/hipe/rtl/hipe_rtl.erl
index bc61bec0bd..2f62dd79ad 100644
--- a/lib/hipe/rtl/hipe_rtl.erl
+++ b/lib/hipe/rtl/hipe_rtl.erl
@@ -413,11 +413,11 @@ rtl_info_update(Rtl, Info) -> Rtl#rtl{info=Info}.
%% move
%%
-mk_move(Dst, Src) -> #move{dst=Dst, src=Src}.
+mk_move(Dst, Src) -> false = is_fpreg(Dst), false = is_fpreg(Src), #move{dst=Dst, src=Src}.
move_dst(#move{dst=Dst}) -> Dst.
-move_dst_update(M, NewDst) -> M#move{dst=NewDst}.
+move_dst_update(M, NewDst) -> false = is_fpreg(NewDst), M#move{dst=NewDst}.
move_src(#move{src=Src}) -> Src.
-move_src_update(M, NewSrc) -> M#move{src=NewSrc}.
+move_src_update(M, NewSrc) -> false = is_fpreg(NewSrc), M#move{src=NewSrc}.
%% is_move(#move{}) -> true;
%% is_move(_) -> false.
@@ -469,7 +469,11 @@ phi_remove_pred(Phi, Pred) ->
case NewArgList of
[Arg] -> %% the phi should be turned into a move instruction
{_Label,Var} = Arg,
- mk_move(phi_dst(Phi), Var);
+ Dst = phi_dst(Phi),
+ case {is_fpreg(Dst), is_fpreg(Var)} of
+ {true, true} -> mk_fmove(Dst, Var);
+ {false, false} -> mk_move(Dst, Var)
+ end;
%% io:format("~nPhi (~w) turned into move (~w) when removing pred ~w~n",[Phi,Move,Pred]),
[_|_] ->
Phi#phi{arglist=NewArgList}
@@ -836,11 +840,11 @@ fp_unop_op(#fp_unop{op=Op}) -> Op.
%% fmove
%%
-mk_fmove(X, Y) -> #fmove{dst=X, src=Y}.
+mk_fmove(X, Y) -> true = is_fpreg(X), true = is_fpreg(Y), #fmove{dst=X, src=Y}.
fmove_dst(#fmove{dst=Dst}) -> Dst.
-fmove_dst_update(M, NewDst) -> M#fmove{dst=NewDst}.
+fmove_dst_update(M, NewDst) -> true = is_fpreg(NewDst), M#fmove{dst=NewDst}.
fmove_src(#fmove{src=Src}) -> Src.
-fmove_src_update(M, NewSrc) -> M#fmove{src=NewSrc}.
+fmove_src_update(M, NewSrc) -> true = is_fpreg(NewSrc), M#fmove{src=NewSrc}.
%%
%% fconv
diff --git a/lib/hipe/sparc/hipe_rtl_to_sparc.erl b/lib/hipe/sparc/hipe_rtl_to_sparc.erl
index dc001f865e..fd21be3ae7 100644
--- a/lib/hipe/sparc/hipe_rtl_to_sparc.erl
+++ b/lib/hipe/sparc/hipe_rtl_to_sparc.erl
@@ -85,17 +85,17 @@ conv_insn(I, Map, Data) ->
end.
conv_fconv(I, Map, Data) ->
- %% Dst := (double)Src, where Dst is FP reg and Src is int reg
- {Src, Map1} = conv_src(hipe_rtl:fconv_src(I), Map), % exclude imm src
+ %% Dst := (double)Src, where Dst is FP reg and Src is GP reg or imm
+ {Src, Map1} = conv_src(hipe_rtl:fconv_src(I), Map),
{Dst, Map2} = conv_fpreg(hipe_rtl:fconv_dst(I), Map1),
I2 = mk_fconv(Src, Dst),
{I2, Map2, Data}.
mk_fconv(Src, Dst) ->
CSP = hipe_sparc:mk_temp(14, 'untagged'), % o6
- Disp = hipe_sparc:mk_simm13(100),
- [hipe_sparc:mk_store('stw', Src, CSP, Disp),
- hipe_sparc:mk_pseudo_fload(CSP, Disp, Dst, true),
+ Offset = 100,
+ mk_store('stw', Src, CSP, Offset) ++
+ [hipe_sparc:mk_pseudo_fload(CSP, hipe_sparc:mk_simm13(Offset), Dst, true),
hipe_sparc:mk_fp_unary('fitod', Dst, Dst)].
conv_fmove(I, Map, Data) ->
diff --git a/lib/hipe/x86/hipe_rtl_to_x86.erl b/lib/hipe/x86/hipe_rtl_to_x86.erl
index d77e4fed3b..36da2f4d44 100644
--- a/lib/hipe/x86/hipe_rtl_to_x86.erl
+++ b/lib/hipe/x86/hipe_rtl_to_x86.erl
@@ -236,7 +236,7 @@ conv_insn(I, Map, Data) ->
#fconv{} ->
{Dst, Map0} = conv_dst(hipe_rtl:fconv_dst(I), Map),
{[], Src, Map1} = conv_src(hipe_rtl:fconv_src(I), Map0),
- I2 = [hipe_x86:mk_fmove(Src, Dst)],
+ I2 = conv_fconv(Dst, Src),
{I2, Map1, Data};
X ->
%% gctest??
@@ -712,6 +712,19 @@ vmap_lookup(Map, Key) ->
vmap_bind(Map, Key, Val) ->
gb_trees:insert(Key, Val, Map).
+%%% Finalise the conversion of an Integer-to-Float operation.
+
+conv_fconv(Dst, Src) ->
+ case hipe_x86:is_imm(Src) of
+ false ->
+ [hipe_x86:mk_fmove(Src, Dst)];
+ true ->
+ %% cvtsi2sd does not allow src to be an immediate
+ Tmp = new_untagged_temp(),
+ [hipe_x86:mk_move(Src, Tmp),
+ hipe_x86:mk_fmove(Tmp, Dst)]
+ end.
+
%%% Finalise the conversion of a 2-address FP operation.
conv_fp_unary(Dst, Src, FpUnOp) ->
diff --git a/lib/inets/doc/src/http_uri.xml b/lib/inets/doc/src/http_uri.xml
index e64c375bba..acbd79b201 100644
--- a/lib/inets/doc/src/http_uri.xml
+++ b/lib/inets/doc/src/http_uri.xml
@@ -63,6 +63,7 @@ host() = string()
port() = pos_integer()
path() = string() - Representing a file path or directory path
query() = string()
+fragment() = string()
]]></code>
<marker id="scheme_defaults"></marker>
@@ -92,13 +93,16 @@ query() = string()
<v>URI = uri() </v>
<v>Options = [Option] </v>
<v>Option = {ipv6_host_with_brackets, boolean()} |
- {scheme_defaults, scheme_defaults()}]</v>
- <v>Result = {Scheme, UserInfo, Host, Port, Path, Query}</v>
+ {scheme_defaults, scheme_defaults()} |
+ {fragment, boolean()}]</v>
+ <v>Result = {Scheme, UserInfo, Host, Port, Path, Query} |
+ {Scheme, UserInfo, Host, Port, Path, Query, Fragment}</v>
<v>UserInfo = user_info()</v>
<v>Host = host()</v>
<v>Port = pos_integer()</v>
<v>Path = path()</v>
<v>Query = query()</v>
+ <v>Fragment = fragment()</v>
<v>Reason = term() </v>
</type>
<desc>
@@ -111,6 +115,9 @@ query() = string()
a scheme not found in the scheme defaults) a port number must be
provided or else the parsing will fail. </p>
+ <p>If the fragment option is true, the URI fragment is returned as
+ part of the parsing result; otherwise it is completely ignored.</p>
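+
+ <p>For example (illustrative), calling
+ <c>http_uri:parse("http://www.erlang.org/doc?q=1#top", [{fragment, true}])</c>
+ returns <c>{ok, Result}</c> where <c>Result</c> is a 7-tuple whose last
+ element is the fragment; without the option, <c>Result</c> is the usual
+ 6-tuple.</p>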
+
<marker id="encode"></marker>
</desc>
</func>
diff --git a/lib/inets/doc/src/httpd.xml b/lib/inets/doc/src/httpd.xml
index 4ca038cc99..20c8a6b1b1 100644
--- a/lib/inets/doc/src/httpd.xml
+++ b/lib/inets/doc/src/httpd.xml
@@ -4,7 +4,7 @@
<erlref>
<header>
<copyright>
- <year>1997</year><year>2013</year>
+ <year>1997</year><year>2015</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -249,7 +249,16 @@
<p>Limits the size of the message header of HTTP request.
Defaults to 10240. </p>
</item>
-
+
+ <marker id="prop_max_content_length"></marker>
+ <tag>{max_content_length, integer()}</tag>
+ <item>
+ <p>Maximum Content-Length in an incoming request, in bytes. Requests
+ with content larger than this are answered with Status 413.
+ Defaults to 100000000 (100 MB).
+ </p>
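+ <p>For example (illustrative), including
+ <c>{max_content_length, 1000000}</c> in the property list passed to
+ <c>inets:start(httpd, ServiceConfig)</c> makes the server answer any
+ request whose Content-Length exceeds 1000000 bytes with status 413.</p>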
+ </item>
+
<marker id="prop_max_uri"></marker>
<tag>{max_uri_size, integer()}</tag>
<item>
diff --git a/lib/inets/doc/src/notes.xml b/lib/inets/doc/src/notes.xml
index fb7034498c..7f73aa5e7b 100644
--- a/lib/inets/doc/src/notes.xml
+++ b/lib/inets/doc/src/notes.xml
@@ -32,7 +32,40 @@
<file>notes.xml</file>
</header>
- <section><title>Inets 5.10.4</title>
+ <section><title>Inets 5.10.5</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ mod_alias now handles https-URIs properly</p>
+ <p>
+ Consistent view of the configuration parameter
+ keep_alive_timeout: it is now presented by the
+ httpd:info/[1,2] functions in the same unit as it
+ was given.</p>
+ <p>
+ Own Id: OTP-12436 Aux Id: seq12786 </p>
+ </item>
+ </list>
+ </section>
+
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ Gracefully handle invalid content-length headers instead
+ of crashing in list_to_integer.</p>
+ <p>
+ Own Id: OTP-12429</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Inets 5.10.4</title>
<section><title>Fixed Bugs and Malfunctions</title>
<list>
diff --git a/lib/inets/examples/httpd_load_test/hdlt_slave.erl b/lib/inets/examples/httpd_load_test/hdlt_slave.erl
index 52af9b5b90..41361418bc 100644
--- a/lib/inets/examples/httpd_load_test/hdlt_slave.erl
+++ b/lib/inets/examples/httpd_load_test/hdlt_slave.erl
@@ -180,7 +180,7 @@ ssh_slave_start(Host, ErlCmd) ->
?DEBUG("ssh_exec_erl -> done", []),
{ok, Connection, Channel};
Error3 ->
- ?LOG("failed exec comand: ~p", [Error3]),
+ ?LOG("failed exec command: ~p", [Error3]),
throw({error, {ssh_exec_failed, Error3}})
end.
diff --git a/lib/inets/src/http_client/httpc_cookie.erl b/lib/inets/src/http_client/httpc_cookie.erl
index 134115bdfa..ed306a84f5 100644
--- a/lib/inets/src/http_client/httpc_cookie.erl
+++ b/lib/inets/src/http_client/httpc_cookie.erl
@@ -334,9 +334,23 @@ add_domain(Str, #http_cookie{domain_default = true}) ->
add_domain(Str, #http_cookie{domain = Domain}) ->
Str ++ "; $Domain=" ++ Domain.
+is_set_cookie_valid("") ->
+ %% an empty Set-Cookie header is not valid
+ false;
+is_set_cookie_valid([$=|_]) ->
+ %% a Set-Cookie header without name is not valid
+ false;
+is_set_cookie_valid(SetCookieHeader) ->
+ %% a Set-Cookie header without name/value is not valid
+ case string:chr(SetCookieHeader, $=) of
+ 0 -> false;
+ _ -> true
+ end.
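+
+%% For example (illustrative):
+%%   is_set_cookie_valid("")             returns false,
+%%   is_set_cookie_valid("=secret")      returns false,
+%%   is_set_cookie_valid("nameonly")     returns false,
+%%   is_set_cookie_valid("id=1; Path=/") returns true.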
+
parse_set_cookies(CookieHeaders, DefaultPathDomain) ->
- %% empty Set-Cookie header is invalid according to RFC but some sites violate it
- SetCookieHeaders = [Value || {"set-cookie", Value} <- CookieHeaders, Value /= ""],
+ %% filter invalid Set-Cookie headers
+ SetCookieHeaders = [Value || {"set-cookie", Value} <- CookieHeaders,
+ is_set_cookie_valid(Value)],
Cookies = [parse_set_cookie(SetCookieHeader, DefaultPathDomain) ||
SetCookieHeader <- SetCookieHeaders],
%% print_cookies("Parsed Cookies", Cookies),
@@ -348,6 +362,8 @@ parse_set_cookie(CookieHeader, {DefaultPath, DefaultDomain}) ->
Name = string:substr(CookieHeader, 1, Pos - 1),
{Value, Attrs} =
case string:substr(CookieHeader, Pos + 1) of
+ [] ->
+ {"", ""};
[$;|ValueAndAttrs] ->
{"", string:tokens(ValueAndAttrs, ";")};
ValueAndAttrs ->
diff --git a/lib/inets/src/http_lib/http_internal.hrl b/lib/inets/src/http_lib/http_internal.hrl
index 53b776c4e7..54425740b5 100644
--- a/lib/inets/src/http_lib/http_internal.hrl
+++ b/lib/inets/src/http_lib/http_internal.hrl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2002-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2002-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -28,6 +28,7 @@
-define(HTTP_MAX_URI_SIZE, nolimit).
-define(HTTP_MAX_VERSION_STRING, 8).
-define(HTTP_MAX_METHOD_STRING, 20).
+-define(HTTP_MAX_CONTENT_LENGTH, 100000000).
-ifndef(HTTP_DEFAULT_SSL_KIND).
-define(HTTP_DEFAULT_SSL_KIND, essl).
diff --git a/lib/inets/src/http_lib/http_request.erl b/lib/inets/src/http_lib/http_request.erl
index f295453bdd..a0833ddf01 100644
--- a/lib/inets/src/http_lib/http_request.erl
+++ b/lib/inets/src/http_lib/http_request.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2005-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2005-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -21,8 +21,16 @@
-include("http_internal.hrl").
--export([headers/2, http_headers/1, is_absolut_uri/1]).
+-export([headers/2, http_headers/1, is_absolut_uri/1, key_value/1]).
+
+key_value(KeyValueStr) ->
+ case lists:splitwith(fun($:) -> false; (_) -> true end, KeyValueStr) of
+ {Key, [$: | Value]} ->
+ {http_util:to_lower(string:strip(Key)), string:strip(Value)};
+ {_, []} ->
+ undefined
+ end.
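+
+%% For example (illustrative):
+%%   key_value("Content-Type: text/html") returns {"content-type", "text/html"},
+%%   key_value("bad header line")         returns undefined.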
%%-------------------------------------------------------------------------
%% headers(HeaderList, #http_request_h{}) -> #http_request_h{}
%% HeaderList - ["HeaderField:Value"]
@@ -34,14 +42,12 @@
%%-------------------------------------------------------------------------
headers([], Headers) ->
Headers;
-headers([Header | Tail], Headers) ->
- case lists:splitwith(fun($:) -> false; (_) -> true end, Header) of
- {Key, [$: | Value]} ->
- headers(Tail, headers(http_util:to_lower(string:strip(Key)),
- string:strip(Value), Headers));
- {_, []} ->
- headers(Tail, Headers)
- end.
+headers([{Key, Value} | Tail], Headers) ->
+ headers(Tail, headers(Key, Value, Headers));
+headers([undefined], Headers) ->
+ Headers;
+headers(KeyValues, Headers) ->
+ headers([key_value(KeyValue) || KeyValue <- KeyValues], Headers).
%%-------------------------------------------------------------------------
%% headers(#http_request_h{}) -> HeaderList
diff --git a/lib/inets/src/http_lib/http_uri.erl b/lib/inets/src/http_lib/http_uri.erl
index 5962001c3a..350a4bc169 100644
--- a/lib/inets/src/http_lib/http_uri.erl
+++ b/lib/inets/src/http_lib/http_uri.erl
@@ -90,8 +90,8 @@ parse(AbsURI, Opts) ->
{error, Reason};
{Scheme, DefaultPort, Rest} ->
case (catch parse_uri_rest(Scheme, DefaultPort, Rest, Opts)) of
- {ok, {UserInfo, Host, Port, Path, Query}} ->
- {ok, {Scheme, UserInfo, Host, Port, Path, Query}};
+ {ok, Result} ->
+ {ok, Result};
{error, Reason} ->
{error, {Reason, Scheme, AbsURI}};
_ ->
@@ -148,27 +148,22 @@ parse_scheme(AbsURI, Opts) ->
end.
parse_uri_rest(Scheme, DefaultPort, "//" ++ URIPart, Opts) ->
- {Authority, PathQuery} =
- case split_uri(URIPart, "/", URIPart, 1, 0) of
- Split = {_, _} ->
- Split;
- URIPart ->
- case split_uri(URIPart, "\\?", URIPart, 1, 0) of
- Split = {_, _} ->
- Split;
- URIPart ->
- {URIPart,""}
- end
- end,
+ {Authority, PathQueryFragment} =
+ split_uri(URIPart, "[/?#]", {URIPart, ""}, 1, 0),
+ {RawPath, QueryFragment} =
+ split_uri(PathQueryFragment, "[?#]", {PathQueryFragment, ""}, 1, 0),
+ {Query, Fragment} =
+ split_uri(QueryFragment, "#", {QueryFragment, ""}, 1, 0),
{UserInfo, HostPort} = split_uri(Authority, "@", {"", Authority}, 1, 1),
{Host, Port} = parse_host_port(Scheme, DefaultPort, HostPort, Opts),
- {Path, Query} = parse_path_query(PathQuery),
- {ok, {UserInfo, Host, Port, Path, Query}}.
-
+ Path = path(RawPath),
+ case lists:keyfind(fragment, 1, Opts) of
+ {fragment, true} ->
+ {ok, {Scheme, UserInfo, Host, Port, Path, Query, Fragment}};
+ _ ->
+ {ok, {Scheme, UserInfo, Host, Port, Path, Query}}
+ end.
-parse_path_query(PathQuery) ->
- {Path, Query} = split_uri(PathQuery, "\\?", {PathQuery, ""}, 1, 0),
- {path(Path), Query}.
%% In this version of the function, we no longer need
%% the Scheme argument, but just in case...
diff --git a/lib/inets/src/http_server/httpd_conf.erl b/lib/inets/src/http_server/httpd_conf.erl
index 8f68d9fcd5..78dda794db 100644
--- a/lib/inets/src/http_server/httpd_conf.erl
+++ b/lib/inets/src/http_server/httpd_conf.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2013. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -205,13 +205,13 @@ load("MaxURISize " ++ MaxHeaderSize, []) ->
" is an invalid number of MaxHeaderSize")}
end;
-load("MaxBodySize " ++ MaxBodySize, []) ->
- case make_integer(MaxBodySize) of
+load("MaxContentLength " ++ Max, []) ->
+ case make_integer(Max) of
{ok, Integer} ->
- {ok, [], {max_body_size,Integer}};
+ {ok, [], {max_content_length, Integer}};
{error, _} ->
- {error, ?NICE(clean(MaxBodySize) ++
- " is an invalid number of MaxBodySize")}
+ {error, ?NICE(clean(Max) ++
+ " is an invalid number of MaxContentLength")}
end;
load("ServerName " ++ ServerName, []) ->
@@ -337,7 +337,7 @@ load("MaxKeepAliveRequest " ++ MaxRequests, []) ->
load("KeepAliveTimeout " ++ Timeout, []) ->
case make_integer(Timeout) of
{ok, Integer} ->
- {ok, [], {keep_alive_timeout, Integer*1000}};
+ {ok, [], {keep_alive_timeout, Integer}};
{error, _} ->
{error, ?NICE(clean(Timeout)++" is an invalid KeepAliveTimeout")}
end;
@@ -569,6 +569,12 @@ validate_config_params([{max_body_size, Value} | Rest])
validate_config_params([{max_body_size, Value} | _]) ->
throw({max_body_size, Value});
+validate_config_params([{max_content_length, Value} | Rest])
+ when is_integer(Value) andalso (Value > 0) ->
+ validate_config_params(Rest);
+validate_config_params([{max_content_length, Value} | _]) ->
+ throw({max_content_length, Value});
+
validate_config_params([{server_name, Value} | Rest])
when is_list(Value) ->
validate_config_params(Rest);
@@ -635,7 +641,7 @@ validate_config_params([{max_keep_alive_request, Value} | Rest])
when is_integer(Value) andalso (Value > 0) ->
validate_config_params(Rest);
validate_config_params([{max_keep_alive_request, Value} | _]) ->
- throw({max_header_size, Value});
+ throw({max_keep_alive_request, Value});
validate_config_params([{keep_alive_timeout, Value} | Rest])
when is_integer(Value) andalso (Value >= 0) ->
@@ -799,7 +805,7 @@ store({server_tokens, ServerTokens} = Entry, _ConfigList) ->
Server = server(ServerTokens),
{ok, [Entry, {server, Server}]};
store({keep_alive_timeout, KeepAliveTimeout}, _ConfigList) ->
- {ok, {keep_alive_timeout, KeepAliveTimeout * 1000}};
+ {ok, {keep_alive_timeout, KeepAliveTimeout}};
store(ConfigListEntry, _ConfigList) ->
{ok, ConfigListEntry}.
diff --git a/lib/inets/src/http_server/httpd_request.erl b/lib/inets/src/http_server/httpd_request.erl
index 712c73599f..6985065c3e 100644
--- a/lib/inets/src/http_server/httpd_request.erl
+++ b/lib/inets/src/http_server/httpd_request.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2005-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2005-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -118,18 +118,17 @@ validate(Method, Uri, Version) ->
%% create it.
%% ----------------------------------------------------------------------
update_mod_data(ModData, Method, RequestURI, HTTPVersion, Headers)->
- ParsedHeaders = tagup_header(Headers),
- PersistentConn = get_persistens(HTTPVersion, ParsedHeaders,
+ PersistentConn = get_persistens(HTTPVersion, Headers,
ModData#mod.config_db),
{ok, ModData#mod{data = [],
method = Method,
absolute_uri = format_absolute_uri(RequestURI,
- ParsedHeaders),
+ Headers),
request_uri = format_request_uri(RequestURI),
http_version = HTTPVersion,
request_line = Method ++ " " ++ RequestURI ++
" " ++ HTTPVersion,
- parsed_header = ParsedHeaders,
+ parsed_header = Headers,
connection = PersistentConn}}.
%%%========================================================================
@@ -146,14 +145,14 @@ parse_method(_, _, _, Max, _, _) ->
%% We do not know the version of the client as it comes after the
%% method send the lowest version in the response so that the client
%% will be able to handle it.
- {error, {too_long, Max, 413, "Method unreasonably long"}, lowest_version()}.
+ {error, {size_error, Max, 413, "Method unreasonably long"}, lowest_version()}.
parse_uri(_, _, Current, MaxURI, _, _)
when (Current > MaxURI) andalso (MaxURI =/= nolimit) ->
%% We do not know the version of the client as it comes after the
%% uri send the lowest version in the response so that the client
%% will be able to handle it.
- {error, {too_long, MaxURI, 414, "URI unreasonably long"},lowest_version()};
+ {error, {size_error, MaxURI, 414, "URI unreasonably long"},lowest_version()};
parse_uri(<<>>, URI, Current, Max, MaxSizes, Result) ->
{?MODULE, parse_uri, [URI, Current, Max, MaxSizes, Result]};
parse_uri(<<?SP, Rest/binary>>, URI, _, _, MaxSizes, Result) ->
@@ -179,12 +178,12 @@ parse_version(<<?CR>> = Data, Version, Current, Max, MaxSizes, Result) ->
parse_version(<<Octet, Rest/binary>>, Version, Current, Max, MaxSizes, Result) when Current =< Max ->
parse_version(Rest, [Octet | Version], Current + 1, Max, MaxSizes, Result);
parse_version(_, _, _, Max,_,_) ->
- {error, {too_long, Max, 413, "Version string unreasonably long"}, lowest_version()}.
+ {error, {size_error, Max, 413, "Version string unreasonably long"}, lowest_version()}.
parse_headers(_, _, _, Current, Max, _, Result)
when Max =/= nolimit andalso Current > Max ->
HttpVersion = lists:nth(3, lists:reverse(Result)),
- {error, {too_long, Max, 413, "Headers unreasonably long"}, HttpVersion};
+ {error, {size_error, Max, 413, "Headers unreasonably long"}, HttpVersion};
parse_headers(<<>>, Header, Headers, Current, Max, MaxSizes, Result) ->
{?MODULE, parse_headers, [<<>>, Header, Headers, Current, Max,
@@ -204,14 +203,22 @@ parse_headers(<<?CR,?LF,?CR,?LF,Body/binary>>, [], [], _, _, _, Result) ->
Result])),
{ok, NewResult};
parse_headers(<<?CR,?LF,?CR,?LF,Body/binary>>, Header, Headers, _, _,
- _, Result) ->
- HTTPHeaders = [lists:reverse(Header) | Headers],
- RequestHeaderRcord =
- http_request:headers(HTTPHeaders, #http_request_h{}),
- NewResult =
- list_to_tuple(lists:reverse([Body, {RequestHeaderRcord,
- HTTPHeaders} | Result])),
- {ok, NewResult};
+ MaxSizes, Result) ->
+ case http_request:key_value(lists:reverse(Header)) of
+ undefined -> %% Skip headers with missing :
+ {ok, list_to_tuple(lists:reverse([Body, {http_request:headers(Headers, #http_request_h{}), Headers} | Result]))};
+ NewHeader ->
+ case check_header(NewHeader, MaxSizes) of
+ ok ->
+ {ok, list_to_tuple(lists:reverse([Body, {http_request:headers([NewHeader | Headers],
+ #http_request_h{}),
+ [NewHeader | Headers]} | Result]))};
+
+ {error, Reason} ->
+ HttpVersion = lists:nth(3, lists:reverse(Result)),
+ {error, Reason, HttpVersion}
+ end
+ end;
parse_headers(<<?CR,?LF,?CR>> = Data, Header, Headers, Current, Max,
MaxSizes, Result) ->
@@ -243,8 +250,21 @@ parse_headers(<<?LF, Octet, Rest/binary>>, Header, Headers, Current, Max,
MaxSizes, Result);
parse_headers(<<?CR,?LF, Octet, Rest/binary>>, Header, Headers, _, Max,
MaxSizes, Result) ->
- parse_headers(Rest, [Octet], [lists:reverse(Header) | Headers],
- 0, Max, MaxSizes, Result);
+ case http_request:key_value(lists:reverse(Header)) of
+ undefined -> %% Skip headers with missing :
+ parse_headers(Rest, [Octet], Headers,
+ 0, Max, MaxSizes, Result);
+ NewHeader ->
+ case check_header(NewHeader, MaxSizes) of
+ ok ->
+ parse_headers(Rest, [Octet], [NewHeader | Headers],
+ 0, Max, MaxSizes, Result);
+ {error, Reason} ->
+ HttpVersion = lists:nth(3, lists:reverse(Result)),
+ {error, Reason, HttpVersion}
+ end
+ end;
+
parse_headers(<<?CR>> = Data, Header, Headers, Current, Max,
MaxSizes, Result) ->
{?MODULE, parse_headers, [Data, Header, Headers, Current, Max,
@@ -388,29 +408,25 @@ get_persistens(HTTPVersion,ParsedHeader,ConfigDB)->
false
end.
-
-%%----------------------------------------------------------------------
-%% tagup_header
-%%
-%% Parses the header of a HTTP request and returns a key,value tuple
-%% list containing Name and Value of each header directive as of:
-%%
-%% Content-Type: multipart/mixed -> {"Content-Type", "multipart/mixed"}
-%%
-%% But in http/1.1 the field-names are case insencitive so now it must be
-%% Content-Type: multipart/mixed -> {"content-type", "multipart/mixed"}
-%% The standard furthermore says that leading and traling white space
-%% is not a part of the fieldvalue and shall therefore be removed.
-%%----------------------------------------------------------------------
-tagup_header([]) -> [];
-tagup_header([Line|Rest]) -> [tag(Line, [])|tagup_header(Rest)].
-
-tag([], Tag) ->
- {http_util:to_lower(lists:reverse(Tag)), ""};
-tag([$:|Rest], Tag) ->
- {http_util:to_lower(lists:reverse(Tag)), string:strip(Rest)};
-tag([Chr|Rest], Tag) ->
- tag(Rest, [Chr|Tag]).
-
lowest_version()->
"HTTP/0.9".
+
+check_header({"content-length", Value}, Maxsizes) ->
+ Max = proplists:get_value(max_content_length, Maxsizes),
+ MaxLen = length(integer_to_list(Max)),
+ case length(Value) =< MaxLen of
+ true ->
+ try
+ _ = list_to_integer(Value),
+ ok
+ catch _:_ ->
+ {error, {size_error, Max, 411, "content-length not an integer"}}
+ end;
+ false ->
+ {error, {size_error, Max, 413, "content-length unreasonably long"}}
+ end;
+check_header(_, _) ->
+ ok.
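+
+%% For example (illustrative, assuming max_content_length is 100000000):
+%%   check_header({"content-length", "123"},
+%%                [{max_content_length, 100000000}]) returns ok,
+%%   check_header({"content-length", "99999999999"},
+%%                [{max_content_length, 100000000}]) returns
+%%     {error, {size_error, 100000000, 413, "content-length unreasonably long"}},
+%%   check_header({"host", "example.com"}, Maxes) returns ok for any Maxes.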
+
+
+
diff --git a/lib/inets/src/http_server/httpd_request_handler.erl b/lib/inets/src/http_server/httpd_request_handler.erl
index 9bea58cc9e..f7a9fe5d49 100644
--- a/lib/inets/src/http_server/httpd_request_handler.erl
+++ b/lib/inets/src/http_server/httpd_request_handler.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -96,8 +96,9 @@ init([Manager, ConfigDB, AcceptTimeout]) ->
proc_lib:init_ack({ok, self()}),
{SocketType, Socket} = await_socket_ownership_transfer(AcceptTimeout),
-
- KeepAliveTimeOut = httpd_util:lookup(ConfigDB, keep_alive_timeout, 150000),
+
+ %% Timeout value is in seconds; we want it in milliseconds
+ KeepAliveTimeOut = 1000 * httpd_util:lookup(ConfigDB, keep_alive_timeout, 150),
case http_transport:negotiate(SocketType, Socket, ?HANDSHAKE_TIMEOUT) of
{error, _Error} ->
@@ -119,11 +120,15 @@ continue_init(Manager, ConfigDB, SocketType, Socket, TimeOut) ->
MaxHeaderSize = max_header_size(ConfigDB),
MaxURISize = max_uri_size(ConfigDB),
NrOfRequest = max_keep_alive_request(ConfigDB),
-
+ MaxContentLen = max_content_length(ConfigDB),
+
{_, Status} = httpd_manager:new_connection(Manager),
MFA = {httpd_request, parse, [[{max_uri, MaxURISize}, {max_header, MaxHeaderSize},
- {max_version, ?HTTP_MAX_VERSION_STRING}, {max_method, ?HTTP_MAX_METHOD_STRING}]]},
+ {max_version, ?HTTP_MAX_VERSION_STRING},
+ {max_method, ?HTTP_MAX_METHOD_STRING},
+ {max_content_length, MaxContentLen}
+ ]]},
State = #state{mod = Mod,
manager = Manager,
@@ -207,7 +212,7 @@ handle_info({Proto, Socket, Data},
set_new_data_size(cancel_request_timeout(State), NewDataSize)
end,
handle_http_msg(Result, NewState);
- {error, {too_long, MaxSize, ErrCode, ErrStr}, Version} ->
+ {error, {size_error, MaxSize, ErrCode, ErrStr}, Version} ->
NewModData = ModData#mod{http_version = Version},
httpd_response:send_status(NewModData, ErrCode, ErrStr),
Reason = io_lib:format("~p: ~p max size is ~p~n",
@@ -444,8 +449,7 @@ handle_body(#state{headers = Headers, body = Body, mod = ModData} = State,
error_log(Reason, ModData),
{stop, normal, State#state{response_sent = true}};
_ ->
- Length =
- list_to_integer(Headers#http_request_h.'content-length'),
+ Length = list_to_integer(Headers#http_request_h.'content-length'),
case ((Length =< MaxBodySize) or (MaxBodySize == nolimit)) of
true ->
case httpd_request:whole_body(Body, Length) of
@@ -454,7 +458,7 @@ handle_body(#state{headers = Headers, body = Body, mod = ModData} = State,
ModData#mod.socket,
[{active, once}]),
{noreply, State#state{mfa =
- {Module, Function, Args}}};
+ {Module, Function, Args}}};
{ok, NewBody} ->
handle_response(
@@ -471,7 +475,7 @@ handle_body(#state{headers = Headers, body = Body, mod = ModData} = State,
handle_expect(#state{headers = Headers, mod =
#mod{config_db = ConfigDB} = ModData} = State,
MaxBodySize) ->
- Length = Headers#http_request_h.'content-length',
+ Length = list_to_integer(Headers#http_request_h.'content-length'),
case expect(Headers, ModData#mod.http_version, ConfigDB) of
continue when (MaxBodySize > Length) orelse (MaxBodySize =:= nolimit) ->
httpd_response:send_status(ModData, 100, ""),
@@ -545,9 +549,13 @@ handle_next_request(#state{mod = #mod{connection = true} = ModData,
init_data = ModData#mod.init_data},
MaxHeaderSize = max_header_size(ModData#mod.config_db),
MaxURISize = max_uri_size(ModData#mod.config_db),
+ MaxContentLen = max_content_length(ModData#mod.config_db),
MFA = {httpd_request, parse, [[{max_uri, MaxURISize}, {max_header, MaxHeaderSize},
- {max_version, ?HTTP_MAX_VERSION_STRING}, {max_method, ?HTTP_MAX_METHOD_STRING}]]},
+ {max_version, ?HTTP_MAX_VERSION_STRING},
+ {max_method, ?HTTP_MAX_METHOD_STRING},
+ {max_content_length, MaxContentLen}
+ ]]},
TmpState = State#state{mod = NewModData,
mfa = MFA,
max_keep_alive_request = decrease(Max),
@@ -630,3 +638,5 @@ max_body_size(ConfigDB) ->
max_keep_alive_request(ConfigDB) ->
httpd_util:lookup(ConfigDB, max_keep_alive_request, infinity).
+max_content_length(ConfigDB) ->
+ httpd_util:lookup(ConfigDB, max_content_length, ?HTTP_MAX_CONTENT_LENGTH).
diff --git a/lib/inets/src/http_server/mod_alias.erl b/lib/inets/src/http_server/mod_alias.erl
index 0b9fe4cfe0..5039cd56b5 100644
--- a/lib/inets/src/http_server/mod_alias.erl
+++ b/lib/inets/src/http_server/mod_alias.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -55,6 +55,7 @@ do(#mod{data = Data} = Info) ->
do_alias(#mod{config_db = ConfigDB,
request_uri = ReqURI,
+ socket_type = SocketType,
data = Data}) ->
{ShortPath, Path, AfterPath} =
real_name(ConfigDB, ReqURI, which_alias(ConfigDB)),
@@ -70,8 +71,9 @@ do_alias(#mod{config_db = ConfigDB,
(LastChar =/= $/)) ->
?hdrt("directory and last-char is a /", []),
ServerName = which_server_name(ConfigDB),
- Port = port_string( which_port(ConfigDB) ),
- URL = "http://" ++ ServerName ++ Port ++ ReqURI ++ "/",
+ Port = port_string(which_port(ConfigDB)),
+ Protocol = get_protocol(SocketType),
+ URL = Protocol ++ ServerName ++ Port ++ ReqURI ++ "/",
ReasonPhrase = httpd_util:reason_phrase(301),
Message = httpd_util:message(301, URL, ConfigDB),
{proceed,
@@ -94,6 +96,12 @@ port_string(80) ->
port_string(Port) ->
":" ++ integer_to_list(Port).
+get_protocol(ip_comm) ->
+ "http://";
+get_protocol(_) ->
+ %% Should clean up to have only one ssl type essl vs ssl is not relevant any more
+ "https://".
+
%% real_name
real_name(ConfigDB, RequestURI, []) ->
diff --git a/lib/inets/test/http_format_SUITE.erl b/lib/inets/test/http_format_SUITE.erl
index d4a3f28f38..5952e9fd6e 100644
--- a/lib/inets/test/http_format_SUITE.erl
+++ b/lib/inets/test/http_format_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2004-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2004-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -355,10 +355,12 @@ http_request(Config) when is_list(Config) ->
"http://www.erlang.org",
"HTTP/1.1",
{#http_request_h{host = "www.erlang.org", te = []},
- ["te: ","host:www.erlang.org"]}, <<>>} =
+ [{"te", []}, {"host", "www.erlang.org"}]}, <<>>} =
parse(httpd_request, parse, [[{max_header, ?HTTP_MAX_HEADER_SIZE},
{max_version, ?HTTP_MAX_VERSION_STRING},
- {max_method, ?HTTP_MAX_METHOD_STRING}]],
+ {max_method, ?HTTP_MAX_METHOD_STRING},
+ {max_content_length, ?HTTP_MAX_CONTENT_LENGTH}
+ ]],
HttpHead),
HttpHead1 = ["GET http://www.erlang.org HTTP/1.1" ++
@@ -369,7 +371,9 @@ http_request(Config) when is_list(Config) ->
{#http_request_h{}, []}, <<>>} =
parse(httpd_request, parse, [[{max_header, ?HTTP_MAX_HEADER_SIZE},
{max_version, ?HTTP_MAX_VERSION_STRING},
- {max_method, ?HTTP_MAX_METHOD_STRING}]], HttpHead1),
+ {max_method, ?HTTP_MAX_METHOD_STRING},
+ {max_content_length, ?HTTP_MAX_CONTENT_LENGTH}
+ ]], HttpHead1),
HttpHead2 = ["GET http://www.erlang.org HTTP/1.1" ++
@@ -380,7 +384,9 @@ http_request(Config) when is_list(Config) ->
{#http_request_h{}, []}, <<>>} =
parse(httpd_request, parse, [[{max_header, ?HTTP_MAX_HEADER_SIZE},
{max_version, ?HTTP_MAX_VERSION_STRING},
- {max_method, ?HTTP_MAX_METHOD_STRING}]], HttpHead2),
+ {max_method, ?HTTP_MAX_METHOD_STRING},
+ {max_content_length, ?HTTP_MAX_CONTENT_LENGTH}
+ ]], HttpHead2),
%% Note the following body is not related to the headers above
HttpBody = ["<HTML>\n<HEAD>\n<TITLE> dummy </TITLE>\n</HEAD>\n<BODY>\n",
diff --git a/lib/inets/test/httpc_SUITE.erl b/lib/inets/test/httpc_SUITE.erl
index 63f8bc5bc6..0e89e831fb 100644
--- a/lib/inets/test/httpc_SUITE.erl
+++ b/lib/inets/test/httpc_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2004-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2004-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -92,6 +92,7 @@ only_simulated() ->
cookie,
cookie_profile,
empty_set_cookie,
+ invalid_set_cookie,
trace,
stream_once,
stream_single_chunk,
@@ -570,6 +571,18 @@ empty_set_cookie(Config) when is_list(Config) ->
ok = httpc:set_options([{cookies, disabled}]).
%%-------------------------------------------------------------------------
+invalid_set_cookie(doc) ->
+ ["Test ignoring invalid Set-Cookie header"];
+invalid_set_cookie(Config) when is_list(Config) ->
+ ok = httpc:set_options([{cookies, enabled}]),
+
+ URL = url(group_name(Config), "/invalid_set_cookie.html", Config),
+ {ok, {{_,200,_}, [_|_], [_|_]}} =
+ httpc:request(get, {URL, []}, [], []),
+
+ ok = httpc:set_options([{cookies, disabled}]).
+
+%%-------------------------------------------------------------------------
headers_as_is(doc) ->
["Test the option headers_as_is"];
headers_as_is(Config) when is_list(Config) ->
@@ -1275,8 +1288,9 @@ dummy_server_init(Caller, ip_comm, Inet, _) ->
dummy_ipcomm_server_loop({httpd_request, parse, [[{max_uri, ?HTTP_MAX_URI_SIZE},
{max_header, ?HTTP_MAX_HEADER_SIZE},
{max_version,?HTTP_MAX_VERSION_STRING},
- {max_method, ?HTTP_MAX_METHOD_STRING}]]},
- [], ListenSocket);
+ {max_method, ?HTTP_MAX_METHOD_STRING},
+ {max_content_length, ?HTTP_MAX_CONTENT_LENGTH}]]},
+ [], ListenSocket);
dummy_server_init(Caller, ssl, Inet, SSLOptions) ->
BaseOpts = [binary, {reuseaddr,true}, {active, false} |
@@ -1290,7 +1304,9 @@ dummy_ssl_server_init(Caller, BaseOpts, Inet) ->
dummy_ssl_server_loop({httpd_request, parse, [[{max_uri, ?HTTP_MAX_URI_SIZE},
{max_method, ?HTTP_MAX_METHOD_STRING},
{max_version,?HTTP_MAX_VERSION_STRING},
- {max_method, ?HTTP_MAX_METHOD_STRING}]]},
+ {max_method, ?HTTP_MAX_METHOD_STRING},
+ {max_content_length, ?HTTP_MAX_CONTENT_LENGTH}
+ ]]},
[], ListenSocket).
dummy_ipcomm_server_loop(MFA, Handlers, ListenSocket) ->
@@ -1367,16 +1383,20 @@ handle_request(Module, Function, Args, Socket) ->
stop ->
stop;
<<>> ->
- {httpd_request, parse, [[<<>>, [{max_uri, ?HTTP_MAX_URI_SIZE},
+ {httpd_request, parse, [[{max_uri,?HTTP_MAX_URI_SIZE},
{max_header, ?HTTP_MAX_HEADER_SIZE},
{max_version,?HTTP_MAX_VERSION_STRING},
- {max_method, ?HTTP_MAX_METHOD_STRING}]]]};
+ {max_method, ?HTTP_MAX_METHOD_STRING},
+ {max_content_length, ?HTTP_MAX_CONTENT_LENGTH}
+ ]]};
Data ->
handle_request(httpd_request, parse,
[Data, [{max_uri, ?HTTP_MAX_URI_SIZE},
- {max_header, ?HTTP_MAX_HEADER_SIZE},
- {max_version,?HTTP_MAX_VERSION_STRING},
- {max_method, ?HTTP_MAX_METHOD_STRING}]], Socket)
+ {max_header, ?HTTP_MAX_HEADER_SIZE},
+ {max_version,?HTTP_MAX_VERSION_STRING},
+ {max_method, ?HTTP_MAX_METHOD_STRING},
+ {max_content_length, ?HTTP_MAX_CONTENT_LENGTH}
+ ]], Socket)
end;
NewMFA ->
NewMFA
@@ -1466,7 +1486,7 @@ dummy_ssl_server_hang_loop(_) ->
ensure_host_header_with_port([]) ->
false;
-ensure_host_header_with_port(["host: " ++ Host| _]) ->
+ensure_host_header_with_port([{"host", Host}| _]) ->
case string:tokens(Host, [$:]) of
[_ActualHost, _Port] ->
true;
@@ -1478,7 +1498,7 @@ ensure_host_header_with_port([_|T]) ->
auth_header([]) ->
auth_header_not_found;
-auth_header(["authorization:" ++ Value | _]) ->
+auth_header([{"authorization", Value} | _]) ->
{ok, string:strip(Value)};
auth_header([_ | Tail]) ->
auth_header(Tail).
@@ -1495,7 +1515,7 @@ handle_auth("Basic " ++ UserInfo, Challange, DefaultResponse) ->
check_cookie([]) ->
ct:fail(no_cookie_header);
-check_cookie(["cookie:" ++ _Value | _]) ->
+check_cookie([{"cookie", _} | _]) ->
ok;
check_cookie([_Head | Tail]) ->
check_cookie(Tail).
@@ -1715,6 +1735,14 @@ handle_uri(_,"/empty_set_cookie.html",_,_,_,_) ->
"Content-Length:32\r\n\r\n"++
"<HTML><BODY>foobar</BODY></HTML>";
+handle_uri(_,"/invalid_set_cookie.html",_,_,_,_) ->
+ "HTTP/1.1 200 ok\r\n" ++
+ "set-cookie: =\r\n" ++
+ "set-cookie: name=\r\n" ++
+ "set-cookie: name-or-value\r\n" ++
+ "Content-Length:32\r\n\r\n"++
+ "<HTML><BODY>foobar</BODY></HTML>";
+
handle_uri(_,"/missing_crlf.html",_,_,_,_) ->
"HTTP/1.1 200 ok" ++
"Content-Length:32\r\n" ++
diff --git a/lib/inets/test/httpd_SUITE.erl b/lib/inets/test/httpd_SUITE.erl
index 4010597657..342004f19b 100644
--- a/lib/inets/test/httpd_SUITE.erl
+++ b/lib/inets/test/httpd_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2013-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2013-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -132,6 +132,7 @@ http_get() ->
bad_hex,
missing_CR,
max_header,
+ max_content_length,
ipv6
].
@@ -979,13 +980,22 @@ max_header(Config) when is_list(Config) ->
Host = ?config(host, Config),
case Version of
"HTTP/0.9" ->
- {skip, no_implemented};
+ {skip, not_implemented};
_ ->
dos_hostname(?config(type, Config), ?config(port, Config), Host,
?config(node, Config), Version, ?MAX_HEADER_SIZE)
end.
%%-------------------------------------------------------------------------
+max_content_length() ->
+ ["Denial Of Service (DOS) attack, prevented by max_content_length"].
+max_content_length(Config) when is_list(Config) ->
+ Version = ?config(http_version, Config),
+ Host = ?config(host, Config),
+ garbage_content_length(?config(type, Config), ?config(port, Config), Host,
+ ?config(node, Config), Version).
+
+%%-------------------------------------------------------------------------
security_1_1(Config) when is_list(Config) ->
security([{http_version, "HTTP/1.1"} | Config]).
@@ -1368,7 +1378,9 @@ server_config(http_reload, Config) ->
server_config(https_reload, Config) ->
[{keep_alive_timeout, 2}] ++ server_config(https, Config);
server_config(http_limit, Config) ->
- [{max_clients, 1}] ++ server_config(http, Config);
+ [{max_clients, 1},
+ %% Make sure option checking code is run
+ {max_content_length, 100000002}] ++ server_config(http, Config);
server_config(https_limit, Config) ->
[{max_clients, 1}] ++ server_config(https, Config);
server_config(http_basic_auth, Config) ->
@@ -1814,7 +1826,7 @@ dos_hostname(Type, Port, Host, Node, Version, Max) ->
ok = httpd_test_lib:verify_request(Type, Host, Port, Node,
dos_hostname_request(TooLongHeader, Version),
- [{statuscode, dos_code(Version)},
+ [{statuscode, request_entity_too_large_code(Version)},
{version, Version}]).
dos_hostname_request(Host, Version) ->
dos_http_request("GET / ", Version, Host).
@@ -1824,11 +1836,32 @@ dos_http_request(Request, "HTTP/1.1" = Version, Host) ->
dos_http_request(Request, Version, Host) ->
Request ++ Version ++ "\r\nhost:" ++ Host ++ "\r\n\r\n".
-dos_code("HTTP/1.0") ->
+request_entity_too_large_code("HTTP/1.0") ->
403; %% 413 not defined in HTTP/1.0
-dos_code(_) ->
+request_entity_too_large_code(_) ->
413.
+length_required_code("HTTP/1.0") ->
+ 403; %% 411 not defined in HTTP/1.0
+length_required_code(_) ->
+ 411.
+
+garbage_content_length(Type, Port, Host, Node, Version) ->
+ ok = httpd_test_lib:verify_request(Type, Host, Port, Node,
+ garbage_content_length_request("GET / ", Version, Host, "aaaa"),
+ [{statuscode, length_required_code(Version)},
+ {version, Version}]),
+ ok = httpd_test_lib:verify_request(Type, Host, Port, Node,
+ garbage_content_length_request("GET / ", Version, Host,
+				       lists:duplicate(100, $a)),
+ [{statuscode, request_entity_too_large_code(Version)},
+ {version, Version}]).
+
+garbage_content_length_request(Request, Version, Host, Garbage) ->
+ http_request(Request, Version, Host,
+ {"content-length:" ++ Garbage, "Body with garbage content length indicator"}).
+
+
update_password(Node, ServerRoot, _Address, Port, AuthPrefix, Dir, Old, New)->
Directory = filename:join([ServerRoot, "htdocs", AuthPrefix ++ Dir]),
rpc:call(Node, mod_auth, update_password,
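
The new max_content_length checks above guard httpd against bogus Content-Length headers. A rough sketch, not taken from the patch (port, paths and the limit value are illustrative), of passing the option like any other httpd property:

    %% Assumes the inets application is not yet started and the directories exist.
    ok = inets:start(),
    {ok, _Pid} = inets:start(httpd,
                     [{port, 8080},
                      {server_name, "localhost"},
                      {server_root, "/tmp/server_root"},
                      {document_root, "/tmp/server_root/htdocs"},
                      %% Requests with an over-long or garbage Content-Length
                      %% header are rejected (cf. the tests above).
                      {max_content_length, 100000000}]).
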
diff --git a/lib/inets/test/uri_SUITE.erl b/lib/inets/test/uri_SUITE.erl
index 9ba09e1474..f75e347d0c 100644
--- a/lib/inets/test/uri_SUITE.erl
+++ b/lib/inets/test/uri_SUITE.erl
@@ -46,6 +46,7 @@ all() ->
userinfo,
scheme,
queries,
+ fragments,
escaped,
hexed_query
].
@@ -105,6 +106,42 @@ queries(Config) when is_list(Config) ->
{ok, {http,[],"localhost",8888,"/foobar.html","?foo=bar&foobar=42"}} =
http_uri:parse("http://localhost:8888/foobar.html?foo=bar&foobar=42").
+fragments(Config) when is_list(Config) ->
+ {ok, {http,[],"localhost",80,"/",""}} =
+ http_uri:parse("http://localhost#fragment"),
+ {ok, {http,[],"localhost",80,"/path",""}} =
+ http_uri:parse("http://localhost/path#fragment"),
+ {ok, {http,[],"localhost",80,"/","?query"}} =
+ http_uri:parse("http://localhost?query#fragment"),
+ {ok, {http,[],"localhost",80,"/path","?query"}} =
+ http_uri:parse("http://localhost/path?query#fragment"),
+ {ok, {http,[],"localhost",80,"/","","#fragment"}} =
+ http_uri:parse("http://localhost#fragment", [{fragment,true}]),
+ {ok, {http,[],"localhost",80,"/path","","#fragment"}} =
+ http_uri:parse("http://localhost/path#fragment", [{fragment,true}]),
+ {ok, {http,[],"localhost",80,"/","?query","#fragment"}} =
+ http_uri:parse("http://localhost?query#fragment", [{fragment,true}]),
+ {ok, {http,[],"localhost",80,"/path","?query","#fragment"}} =
+ http_uri:parse("http://localhost/path?query#fragment",
+ [{fragment,true}]),
+ {ok, {http,[],"localhost",80,"/","",""}} =
+ http_uri:parse("http://localhost", [{fragment,true}]),
+ {ok, {http,[],"localhost",80,"/path","",""}} =
+ http_uri:parse("http://localhost/path", [{fragment,true}]),
+ {ok, {http,[],"localhost",80,"/","?query",""}} =
+ http_uri:parse("http://localhost?query", [{fragment,true}]),
+ {ok, {http,[],"localhost",80,"/path","?query",""}} =
+ http_uri:parse("http://localhost/path?query", [{fragment,true}]),
+ {ok, {http,[],"localhost",80,"/","","#"}} =
+ http_uri:parse("http://localhost#", [{fragment,true}]),
+ {ok, {http,[],"localhost",80,"/path","","#"}} =
+ http_uri:parse("http://localhost/path#", [{fragment,true}]),
+ {ok, {http,[],"localhost",80,"/","?query","#"}} =
+ http_uri:parse("http://localhost?query#", [{fragment,true}]),
+ {ok, {http,[],"localhost",80,"/path","?query","#"}} =
+ http_uri:parse("http://localhost/path?query#", [{fragment,true}]),
+ ok.
+
escaped(Config) when is_list(Config) ->
{ok, {http,[],"www.somedomain.com",80,"/%2Eabc",[]}} =
http_uri:parse("http://www.somedomain.com/%2Eabc"),
diff --git a/lib/inets/vsn.mk b/lib/inets/vsn.mk
index dbae5e4b3c..7d11916454 100644
--- a/lib/inets/vsn.mk
+++ b/lib/inets/vsn.mk
@@ -18,6 +18,6 @@
# %CopyrightEnd%
APPLICATION = inets
-INETS_VSN = 5.10.4
+INETS_VSN = 5.10.5
PRE_VSN =
APP_VSN = "$(APPLICATION)-$(INETS_VSN)$(PRE_VSN)"
diff --git a/lib/kernel/doc/src/gen_sctp.xml b/lib/kernel/doc/src/gen_sctp.xml
index dc9e4766a9..ee8cd441d4 100644
--- a/lib/kernel/doc/src/gen_sctp.xml
+++ b/lib/kernel/doc/src/gen_sctp.xml
@@ -961,7 +961,7 @@
<pre> #sctp_paddrinfo{
assoc_id = assoc_id(),
address = {IP, Port},
- state = inactive | active,
+ state = inactive | active | unconfirmed,
cwnd = integer(),
srtt = integer(),
rto = integer(),
diff --git a/lib/kernel/doc/src/inet.xml b/lib/kernel/doc/src/inet.xml
index 8dd311e5cd..77a8caaaf6 100644
--- a/lib/kernel/doc/src/inet.xml
+++ b/lib/kernel/doc/src/inet.xml
@@ -332,23 +332,23 @@ fe80::204:acff:fe17:bf38
<taglist>
<tag><c>recv_avg</c></tag>
<item>
- <p>Average size of packets in bytes received to the socket.</p>
+ <p>Average size of packets in bytes received by the socket.</p>
</item>
<tag><c>recv_cnt</c></tag>
<item>
- <p>Number of packets received to the socket.</p>
+ <p>Number of packets received by the socket.</p>
</item>
<tag><c>recv_dvi</c></tag>
<item>
- <p>Average packet size deviation in bytes received to the socket.</p>
+ <p>Average packet size deviation in bytes received by the socket.</p>
</item>
<tag><c>recv_max</c></tag>
<item>
- <p>The size of the largest packet in bytes received to the socket.</p>
+ <p>The size of the largest packet in bytes received by the socket.</p>
</item>
<tag><c>recv_oct</c></tag>
<item>
- <p>Number of bytes received to the socket.</p>
+ <p>Number of bytes received by the socket.</p>
</item>
<tag><c>send_avg</c></tag>
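
The statistics named above are fetched with inet:getstat/2; a short sketch, assuming Socket is an open gen_tcp or gen_udp socket:

    {ok, Stats} = inet:getstat(Socket, [recv_cnt, recv_oct, recv_avg, recv_max]),
    [io:format("~p: ~p~n", [Name, Value]) || {Name, Value} <- Stats].
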
diff --git a/lib/kernel/doc/src/kernel_app.xml b/lib/kernel/doc/src/kernel_app.xml
index 00c6bc33d6..96e3651140 100644
--- a/lib/kernel/doc/src/kernel_app.xml
+++ b/lib/kernel/doc/src/kernel_app.xml
@@ -4,7 +4,7 @@
<appref>
<header>
<copyright>
- <year>1996</year><year>2014</year>
+ <year>1996</year><year>2015</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -188,6 +188,18 @@
<p>Define the <c>First..Last</c> port range for the listener
socket of a distributed Erlang node.</p>
</item>
+ <tag><c>{inet_dist_listen_options, Opts}</c></tag>
+ <item>
+ <p>Define a list of extra socket options to be used when opening the
+ listening socket for a distributed Erlang node.
+        See <seealso marker="gen_tcp#listen/2">gen_tcp:listen/2</seealso>.</p>
+ </item>
+ <tag><c>{inet_dist_connect_options, Opts}</c></tag>
+ <item>
+ <p>Define a list of extra socket options to be used when connecting to
+ other distributed Erlang nodes.
+        See <seealso marker="gen_tcp#connect/4">gen_tcp:connect/4</seealso>.</p>
+ </item>
<tag><c>inet_parse_error_log = silent</c></tag>
<item>
<p>If this configuration parameter is set, no
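
A minimal sys.config sketch using the two new kernel parameters; the option values are examples only, mirroring the priority option exercised by the distribution test suite further down:

    [{kernel,
      [{inet_dist_listen_options,  [{nodelay, true}, {priority, 1}]},
       {inet_dist_connect_options, [{nodelay, true}, {priority, 1}]}]}].
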
diff --git a/lib/kernel/src/application_controller.erl b/lib/kernel/src/application_controller.erl
index daad45b6c2..6635885aaf 100644
--- a/lib/kernel/src/application_controller.erl
+++ b/lib/kernel/src/application_controller.erl
@@ -1615,7 +1615,6 @@ conv([Key, Val | T]) ->
[{make_term(Key), make_term(Val)} | conv(T)];
conv(_) -> [].
-%%% Fix some day: eliminate the duplicated code here
make_term(Str) ->
case erl_scan:string(Str) of
{ok, Tokens, _} ->
@@ -1623,16 +1622,17 @@ make_term(Str) ->
{ok, Term} ->
Term;
{error, {_,M,Reason}} ->
- error_logger:format("application_controller: ~ts: ~ts~n",
- [M:format_error(Reason), Str]),
- throw({error, {bad_environment_value, Str}})
+ handle_make_term_error(M, Reason, Str)
end;
{error, {_,M,Reason}, _} ->
- error_logger:format("application_controller: ~ts: ~ts~n",
- [M:format_error(Reason), Str]),
- throw({error, {bad_environment_value, Str}})
+ handle_make_term_error(M, Reason, Str)
end.
+handle_make_term_error(Mod, Reason, Str) ->
+ error_logger:format("application_controller: ~ts: ~ts~n",
+ [Mod:format_error(Reason), Str]),
+ throw({error, {bad_environment_value, Str}}).
+
get_env_i(Name, #state{conf_data = ConfData}) when is_list(ConfData) ->
case lists:keyfind(Name, 1, ConfData) of
{_Name, Env} -> Env;
diff --git a/lib/kernel/src/file_io_server.erl b/lib/kernel/src/file_io_server.erl
index 0e9ff5bc0f..7d30e7e1d8 100644
--- a/lib/kernel/src/file_io_server.erl
+++ b/lib/kernel/src/file_io_server.erl
@@ -307,18 +307,18 @@ io_request({get_chars,Enc,_Prompt,N},
#state{}=State) ->
get_chars(N, Enc, State);
-%%
-%% This optimization gives almost nothing - needs more working...
-%% Disabled for now. /PaN
-%%
-%% io_request({get_line,Enc,_Prompt},
-%% #state{unic=latin1}=State) ->
-%% get_line(Enc,State);
-
-io_request({get_line,Enc,_Prompt},
- #state{}=State) ->
- get_chars(io_lib, collect_line, [], Enc, State);
-
+io_request({get_line,OutEnc,_Prompt}, #state{buf=Buf, read_mode=Mode, unic=InEnc} = State0) ->
+ try
+ %% Minimize the encoding conversions
+ WorkEnc = case InEnc of
+ {_,_} -> OutEnc; %% utf16 or utf32
+ _ -> InEnc %% Byte oriented utf8 or latin1
+ end,
+ {Res, State} = get_line(start, convert_enc(Buf, InEnc, WorkEnc), WorkEnc, State0),
+ {reply, cast(Res, Mode, WorkEnc, OutEnc), State}
+ catch exit:ExError ->
+ {stop,ExError,{error,ExError},State0#state{buf= <<>>}}
+ end;
io_request({setopts, Opts},
#state{}=State) when is_list(Opts) ->
@@ -386,56 +386,40 @@ put_chars(Chars, InEncoding, #state{handle=Handle, unic=OutEncoding}=State) ->
{stop,normal,{error,{no_translation, InEncoding, OutEncoding}},State}
end.
-%%
-%% Process the I/O request get_line for latin1 encoding of file specially
-%% Unfortunately this function gives almost nothing, it needs more work
-%% I disable it for now /PaN
-%%
-%% srch(<<>>,_,_) ->
-%% nomatch;
-%% srch(<<X:8,_/binary>>,X,N) ->
-%% {match,N};
-%% srch(<<_:8,T/binary>>,X,N) ->
-%% srch(T,X,N+1).
-%% get_line(OutEnc, #state{handle=Handle,buf = <<>>,unic=latin1}=State) ->
-%% case ?PRIM_FILE:read(Handle,?READ_SIZE_BINARY) of
-%% {ok, B} ->
-%% get_line(OutEnc, State#state{buf = B});
-%% eof ->
-%% {reply,eof,State};
-%% {error,Reason}=Error ->
-%% {stop,Reason,Error,State}
-%% end;
-%% get_line(OutEnc, #state{handle=Handle,buf=Buf,read_mode=ReadMode,unic=latin1}=State) ->
-%% case srch(Buf,$\n,0) of
-%% nomatch ->
-%% case ?PRIM_FILE:read(Handle,?READ_SIZE_BINARY) of
-%% {ok, B} ->
-%% get_line(OutEnc,State#state{buf = <<Buf/binary,B/binary>>});
-%% eof ->
-%% std_reply(cast(Buf, ReadMode,latin1,OutEnc), State);
-%% {error,Reason}=Error ->
-%% {stop,Reason,Error,State#state{buf= <<>>}}
-%% end;
-%% {match,Pos} when Pos >= 1->
-%% PosP1 = Pos + 1,
-%% <<Res0:PosP1/binary,NewBuf/binary>> = Buf,
-%% PosM1 = Pos - 1,
-%% Res = case Res0 of
-%% <<Chomped:PosM1/binary,$\r:8,$\n:8>> ->
-%% cat(Chomped, <<"\n">>, ReadMode,latin1,OutEnc);
-%% _Other ->
-%% cast(Res0, ReadMode,latin1,OutEnc)
-%% end,
-%% {reply,Res,State#state{buf=NewBuf}};
-%% {match,Pos} ->
-%% PosP1 = Pos + 1,
-%% <<Res:PosP1/binary,NewBuf/binary>> = Buf,
-%% {reply,Res,State#state{buf=NewBuf}}
-%% end;
-%% get_line(_, #state{}=State) ->
-%% {error,{error,get_line},State}.
-
+get_line(S, {<<>>, Cont}, OutEnc,
+ #state{handle=Handle, read_mode=Mode, unic=InEnc}=State) ->
+ case ?PRIM_FILE:read(Handle, read_size(Mode)) of
+ {ok,Bin} ->
+ get_line(S, convert_enc([Cont, Bin], InEnc, OutEnc), OutEnc, State);
+ eof ->
+ get_line(S, {eof, Cont}, OutEnc, State);
+ {error,Reason}=Error ->
+ {stop,Reason,Error,State}
+ end;
+get_line(S0, {Buf, BCont}, OutEnc, #state{unic=InEnc}=State) ->
+ case io_lib:collect_line(S0, Buf, OutEnc, []) of
+ {stop, Result, Cont0} ->
+ %% Convert both buffers back to file InEnc encoding
+ {Cont, <<>>} = convert_enc(Cont0, OutEnc, InEnc),
+ {Result, State#state{buf=cast_binary([Cont, BCont])}};
+ S ->
+ get_line(S, {<<>>, BCont}, OutEnc, State)
+ end.
+
+convert_enc(Bins, Enc, Enc) ->
+ {cast_binary(Bins), <<>>};
+convert_enc(eof, _, _) ->
+ {<<>>, <<>>};
+convert_enc(Bin, InEnc, OutEnc) ->
+ case unicode:characters_to_binary(Bin, InEnc, OutEnc) of
+ Res when is_binary(Res) ->
+ {Res, <<>>};
+ {incomplete, Res, Cont} ->
+ {Res, Cont};
+ {error, _, _} ->
+ exit({no_translation, InEnc, OutEnc})
+ end.
+
%%
%% Process the I/O request get_chars
%%
@@ -640,8 +624,6 @@ invalid_unicode_error(Mod, Func, XtraArg, S) ->
%% Convert error code to make it look as before
err_func(io_lib, get_until, {_,F,_}) ->
- F;
-err_func(_, F, _) ->
F.
@@ -713,6 +695,8 @@ cat(B1, B2, list, latin1,_) ->
binary_to_list(B1)++binary_to_list(B2).
%% Cast binary to list or binary
+cast(eof, _, _, _) ->
+ eof;
cast(B, binary, latin1, latin1) ->
B;
cast(B, binary, InEncoding, OutEncoding) ->
@@ -736,6 +720,8 @@ cast(B, list, InEncoding, OutEncoding) ->
%% Convert buffer to binary
cast_binary(Binary) when is_binary(Binary) ->
Binary;
+cast_binary([<<>>|List]) ->
+ cast_binary(List);
cast_binary(List) when is_list(List) ->
list_to_binary(List);
cast_binary(_EOF) ->
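
The rewritten get_line clause keeps the buffer in the file's native encoding and converts only when the requested encoding differs. A sketch of the client-side call path that ends up here, assuming a UTF-8 encoded file named "utf8.txt":

    {ok, Fd} = file:open("utf8.txt", [read, binary, {encoding, utf8}]),
    Line = io:get_line(Fd, ''),   %% served by the get_line request above
    ok = file:close(Fd).
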
diff --git a/lib/kernel/src/gen_udp.erl b/lib/kernel/src/gen_udp.erl
index 70dceb3679..860eec10a0 100644
--- a/lib/kernel/src/gen_udp.erl
+++ b/lib/kernel/src/gen_udp.erl
@@ -78,7 +78,7 @@
ipv6_v6only.
-type socket() :: port().
--export_type([option/0, option_name/0]).
+-export_type([option/0, option_name/0, socket/0]).
-spec open(Port) -> {ok, Socket} | {error, Reason} when
Port :: inet:port_number(),
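
With socket/0 now exported, user code can refer to the type in its own specs; a hypothetical helper (send_ping is not part of the patch):

    -spec send_ping(gen_udp:socket(), inet:ip_address(), inet:port_number()) ->
              ok | {error, term()}.
    send_ping(Socket, Ip, Port) ->
        gen_udp:send(Socket, Ip, Port, <<"ping">>).
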
diff --git a/lib/kernel/src/hipe_unified_loader.erl b/lib/kernel/src/hipe_unified_loader.erl
index e5928c7b63..2d124d95b7 100644
--- a/lib/kernel/src/hipe_unified_loader.erl
+++ b/lib/kernel/src/hipe_unified_loader.erl
@@ -827,7 +827,6 @@ patch_to_emu_step1(Mod) ->
%% were added as the result of dynamic apply calls. We must
%% purge them too, but we have no explicit record of them.
%% Therefore invalidate all native addresses for the module.
- %% emu_make_stubs/1 will repair the ones for compiled static calls.
hipe_bifs:invalidate_funinfo_native_addresses(MFAs),
%% Find all call sites that call these MFAs. As a side-effect,
%% create native stubs for any MFAs that are referred.
@@ -841,7 +840,6 @@ patch_to_emu_step1(Mod) ->
%% Step 2 must occur after the new BEAM stub module is created.
patch_to_emu_step2(ReferencesToPatch) ->
- emu_make_stubs(ReferencesToPatch),
redirect(ReferencesToPatch).
-spec is_loaded(Module::atom()) -> boolean().
@@ -852,21 +850,6 @@ is_loaded(M) when is_atom(M) ->
catch _:_ -> false
end.
--ifdef(notdef).
-emu_make_stubs([{MFA,_Refs}|Rest]) ->
- make_stub(MFA),
- emu_make_stubs(Rest);
-emu_make_stubs([]) ->
- [].
-
-make_stub({_,_,A} = MFA) ->
- EmuAddress = hipe_bifs:get_emu_address(MFA),
- StubAddress = hipe_bifs:make_native_stub(EmuAddress, A),
- hipe_bifs:set_funinfo_native_address(MFA, StubAddress).
--else.
-emu_make_stubs(_) -> [].
--endif.
-
%%--------------------------------------------------------------------
%% Given a list of MFAs, tag them with their referred_from references.
%% The resulting {MFA,Refs} list is later passed to redirect/1, once
diff --git a/lib/kernel/src/inet.erl b/lib/kernel/src/inet.erl
index 43bab8bcf0..ec2c350931 100644
--- a/lib/kernel/src/inet.erl
+++ b/lib/kernel/src/inet.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -1070,7 +1070,7 @@ gethostbyname_tm(Name, Type, Timer, [wins|_]=Opts) ->
gethostbyname_tm_native(Name, Type, Timer, Opts);
gethostbyname_tm(Name, Type, Timer, [native|_]=Opts) ->
gethostbyname_tm_native(Name, Type, Timer, Opts);
-gethostbyname_tm(Name, Type, Timer, [_|_]=Opts) ->
+gethostbyname_tm(Name, Type, Timer, [_|Opts]) ->
gethostbyname_tm(Name, Type, Timer, Opts);
%% Make sure we always can look up our own hostname.
gethostbyname_tm(Name, Type, Timer, []) ->
diff --git a/lib/kernel/src/inet_tcp_dist.erl b/lib/kernel/src/inet_tcp_dist.erl
index 63f236b069..835dcf2705 100644
--- a/lib/kernel/src/inet_tcp_dist.erl
+++ b/lib/kernel/src/inet_tcp_dist.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2013. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -77,7 +77,7 @@ listen(Name) ->
Error
end.
-do_listen(Options0) ->
+do_listen(Options) ->
{First,Last} = case application:get_env(kernel,inet_dist_listen_min) of
{ok,N} when is_integer(N) ->
case application:get_env(kernel,
@@ -90,13 +90,7 @@ do_listen(Options0) ->
_ ->
{0,0}
end,
- Options = case application:get_env(kernel, inet_dist_use_interface) of
- {ok, Ip} ->
- [{ip, Ip} | Options0];
- _ ->
- Options0
- end,
- do_listen(First, Last, [{backlog,128}|Options]).
+ do_listen(First, Last, listen_options([{backlog,128}|Options])).
do_listen(First,Last,_) when First > Last ->
{error,eaddrinuse};
@@ -108,6 +102,23 @@ do_listen(First,Last,Options) ->
Other
end.
+listen_options(Opts0) ->
+ Opts1 =
+ case application:get_env(kernel, inet_dist_use_interface) of
+ {ok, Ip} ->
+ [{ip, Ip} | Opts0];
+ _ ->
+ Opts0
+ end,
+ case application:get_env(kernel, inet_dist_listen_options) of
+ {ok,ListenOpts} ->
+ erlang:display({inet_dist_listen_options, ListenOpts}),
+ ListenOpts ++ Opts1;
+ _ ->
+ Opts1
+ end.
+
+
%% ------------------------------------------------------------
%% Accepts new connection attempts from other Erlang nodes.
%% ------------------------------------------------------------
@@ -219,7 +230,7 @@ nodelay() ->
_ ->
{nodelay, true}
end.
-
+
%% ------------------------------------------------------------
%% Get remote information about a Socket.
@@ -260,9 +271,11 @@ do_setup(Kernel, Node, Type, MyNode, LongOrShortNames,SetupTime) ->
?trace("port_please(~p) -> version ~p~n",
[Node,Version]),
dist_util:reset_timer(Timer),
- case inet_tcp:connect(Ip, TcpPort,
- [{active, false},
- {packet,2}]) of
+ case
+ inet_tcp:connect(
+ Ip, TcpPort,
+ connect_options([{active, false}, {packet, 2}]))
+ of
{ok, Socket} ->
HSData = #hs_data{
kernel_pid = Kernel,
@@ -324,6 +337,15 @@ do_setup(Kernel, Node, Type, MyNode, LongOrShortNames,SetupTime) ->
?shutdown(Node)
end.
+connect_options(Opts) ->
+ case application:get_env(kernel, inet_dist_connect_options) of
+ {ok,ConnectOpts} ->
+	    erlang:display({inet_dist_connect_options, ConnectOpts}),
+ ConnectOpts ++ Opts;
+ _ ->
+ Opts
+ end.
+
%%
%% Close a socket.
%%
diff --git a/lib/kernel/src/standard_error.erl b/lib/kernel/src/standard_error.erl
index 10cf77e0d4..1c43063937 100644
--- a/lib/kernel/src/standard_error.erl
+++ b/lib/kernel/src/standard_error.erl
@@ -63,7 +63,7 @@ server(PortName,PortSettings) ->
run(Port).
run(P) ->
- put(unicode,false),
+ put(encoding, latin1),
server_loop(P).
server_loop(Port) ->
@@ -95,25 +95,47 @@ do_io_request(Req, From, ReplyAs, Port) ->
io_reply(From, ReplyAs, Reply).
%% New in R13B
-% Wide characters (Unicode)
-io_request({put_chars,Encoding,Chars}, Port) -> % Binary new in R9C
- put_chars(wrap_characters_to_binary(Chars,Encoding,
- case get(unicode) of
- true -> unicode;
- _ -> latin1
- end), Port);
-io_request({put_chars,Encoding,Mod,Func,Args}, Port) ->
- Result = case catch apply(Mod,Func,Args) of
- Data when is_list(Data); is_binary(Data) ->
- wrap_characters_to_binary(Data,Encoding,
- case get(unicode) of
- true -> unicode;
- _ -> latin1
- end);
- Undef ->
- Undef
- end,
- put_chars(Result, Port);
+%% Encoding option (unicode/latin1)
+io_request({put_chars,unicode,Chars}, Port) ->
+ case wrap_characters_to_binary(Chars, unicode, get(encoding)) of
+ error ->
+ {error,{error,put_chars}};
+ Bin ->
+ put_chars(Bin, Port)
+ end;
+io_request({put_chars,unicode,Mod,Func,Args}, Port) ->
+ case catch apply(Mod, Func, Args) of
+ Data when is_list(Data); is_binary(Data) ->
+ case wrap_characters_to_binary(Data, unicode, get(encoding)) of
+ Bin when is_binary(Bin) ->
+ put_chars(Bin, Port);
+ error ->
+ {error,{error,put_chars}}
+ end;
+ _ ->
+ {error,{error,put_chars}}
+ end;
+io_request({put_chars,latin1,Chars}, Port) ->
+ case catch unicode:characters_to_binary(Chars, latin1, get(encoding)) of
+ Data when is_binary(Data) ->
+ put_chars(Data, Port);
+ _ ->
+ {error,{error,put_chars}}
+ end;
+io_request({put_chars,latin1,Mod,Func,Args}, Port) ->
+ case catch apply(Mod, Func, Args) of
+ Data when is_list(Data); is_binary(Data) ->
+ case
+ catch unicode:characters_to_binary(Data, latin1, get(encoding))
+ of
+ Bin when is_binary(Bin) ->
+ put_chars(Bin, Port);
+ _ ->
+ {error,{error,put_chars}}
+ end;
+ _ ->
+ {error,{error,put_chars}}
+ end;
%% BC if called from pre-R13 node
io_request({put_chars,Chars}, Port) ->
io_request({put_chars,latin1,Chars}, Port);
@@ -134,10 +156,10 @@ io_request({get_geometry,rows},Port) ->
_ ->
{error,{error,enotsup}}
end;
-io_request({getopts,[]}, Port) ->
- getopts(Port);
-io_request({setopts,Opts}, Port) when is_list(Opts) ->
- setopts(Opts, Port);
+io_request(getopts, _Port) ->
+ getopts();
+io_request({setopts,Opts}, _Port) when is_list(Opts) ->
+ setopts(Opts);
io_request({requests,Reqs}, Port) ->
io_requests(Reqs, {ok,ok}, Port);
io_request(R, _Port) -> %Unknown request
@@ -176,47 +198,48 @@ io_reply(From, ReplyAs, Reply) ->
%% put_chars
put_chars(Chars, Port) when is_binary(Chars) ->
_ = put_port(Chars, Port),
- {ok,ok};
-put_chars(Chars, Port) ->
- case catch list_to_binary(Chars) of
- Binary when is_binary(Binary) ->
- put_chars(Binary, Port);
- _ ->
- {error,{error,put_chars}}
- end.
+ {ok,ok}.
%% setopts
-setopts(Opts0,Port) ->
- Opts = proplists:unfold(
- proplists:substitute_negations(
- [{latin1,unicode}],
- Opts0)),
+setopts(Opts0) ->
+ Opts = expand_encoding(Opts0),
case check_valid_opts(Opts) of
- true ->
- do_setopts(Opts,Port);
- false ->
- {error,{error,enotsup}}
+ true ->
+ do_setopts(Opts);
+ false ->
+ {error,{error,enotsup}}
end.
+
check_valid_opts([]) ->
true;
-check_valid_opts([{unicode,Valid}|T]) when Valid =:= true; Valid =:= utf8; Valid =:= false ->
+check_valid_opts([{encoding,Valid}|T]) when Valid =:= unicode;
+ Valid =:= utf8; Valid =:= latin1 ->
check_valid_opts(T);
check_valid_opts(_) ->
false.
-do_setopts(Opts, _Port) ->
- case proplists:get_value(unicode,Opts) of
- Valid when Valid =:= true; Valid =:= utf8 ->
- put(unicode,true);
- false ->
- put(unicode,false);
- undefined ->
- ok
+expand_encoding([]) ->
+ [];
+expand_encoding([latin1 | T]) ->
+ [{encoding,latin1} | expand_encoding(T)];
+expand_encoding([unicode | T]) ->
+ [{encoding,unicode} | expand_encoding(T)];
+expand_encoding([H|T]) ->
+ [H|expand_encoding(T)].
+
+do_setopts(Opts) ->
+ case proplists:get_value(encoding, Opts) of
+ Valid when Valid =:= unicode; Valid =:= utf8 ->
+ put(encoding, unicode);
+ latin1 ->
+ put(encoding, latin1);
+ undefined ->
+ ok
end,
{ok,ok}.
-getopts(_Port) ->
- Uni = {unicode, get(unicode) =:= true},
+getopts() ->
+ Uni = {encoding,get(encoding)},
{ok,[Uni]}.
wrap_characters_to_binary(Chars,From,To) ->
@@ -227,17 +250,17 @@ wrap_characters_to_binary(Chars,From,To) ->
_Else ->
16#10ffff
end,
- unicode:characters_to_binary(
- [ case X of
- $\n ->
- if
- TrNl ->
- "\r\n";
- true ->
- $\n
- end;
- High when High > Limit ->
- ["\\x{",erlang:integer_to_list(X, 16),$}];
- Ordinary ->
- Ordinary
- end || X <- unicode:characters_to_list(Chars,From) ],unicode,To).
+ case catch unicode:characters_to_list(Chars, From) of
+ L when is_list(L) ->
+ unicode:characters_to_binary(
+ [ case X of
+ $\n when TrNl ->
+ "\r\n";
+ High when High > Limit ->
+ ["\\x{",erlang:integer_to_list(X, 16),$}];
+ Low ->
+ Low
+ end || X <- L ], unicode, To);
+ _ ->
+ error
+ end.
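
The unicode flag is replaced by an encoding option; a small sketch of driving the new setopts/getopts requests from user code (the output string is only an example):

    [{encoding, latin1}] = io:getopts(standard_error),
    ok = io:setopts(standard_error, [{encoding, unicode}]),
    ok = io:put_chars(standard_error, <<"Ω unicode output\n"/utf8>>).
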
diff --git a/lib/kernel/test/Makefile b/lib/kernel/test/Makefile
index f1b8a105ed..ef351a25fb 100644
--- a/lib/kernel/test/Makefile
+++ b/lib/kernel/test/Makefile
@@ -77,7 +77,8 @@ MODULES= \
ignore_cores \
zlib_SUITE \
loose_node \
- sendfile_SUITE
+ sendfile_SUITE \
+ standard_error_SUITE
APP_FILES = \
appinc.app \
diff --git a/lib/kernel/test/erl_distribution_SUITE.erl b/lib/kernel/test/erl_distribution_SUITE.erl
index 9cccdab76b..15c2adc957 100644
--- a/lib/kernel/test/erl_distribution_SUITE.erl
+++ b/lib/kernel/test/erl_distribution_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2011. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -26,7 +26,8 @@
-export([tick/1, tick_change/1, illegal_nodenames/1, hidden_node/1,
table_waste/1, net_setuptime/1,
-
+ inet_dist_options_options/1,
+
monitor_nodes_nodedown_reason/1,
monitor_nodes_complex_nodedown_reason/1,
monitor_nodes_node_type/1,
@@ -38,7 +39,8 @@
monitor_nodes_many/1]).
%% Performs the test at another node.
--export([tick_cli_test/1, tick_cli_test1/1,
+-export([get_socket_priorities/0,
+ tick_cli_test/1, tick_cli_test1/1,
tick_serv_test/2, tick_serv_test1/1,
keep_conn/1, time_ping/1]).
@@ -62,7 +64,8 @@ suite() -> [{ct_hooks,[ts_install_cth]}].
all() ->
[tick, tick_change, illegal_nodenames, hidden_node,
- table_waste, net_setuptime, {group, monitor_nodes}].
+ table_waste, net_setuptime, inet_dist_options_options,
+ {group, monitor_nodes}].
groups() ->
[{monitor_nodes, [],
@@ -554,6 +557,71 @@ check_monitor_nodes_res(Pid, Node) ->
end.
+
+inet_dist_options_options(suite) -> [];
+inet_dist_options_options(doc) ->
+ ["Check the kernel inet_dist_{listen,connect}_options options"];
+inet_dist_options_options(Config) when is_list(Config) ->
+ Prio = 1,
+ case gen_udp:open(0, [{priority,Prio}]) of
+ {ok,Socket} ->
+ case inet:getopts(Socket, [priority]) of
+ {ok,[{priority,Prio}]} ->
+ ok = gen_udp:close(Socket),
+ do_inet_dist_options_options(Prio);
+ _ ->
+ ok = gen_udp:close(Socket),
+ {skip,
+ "Can not set priority "++integer_to_list(Prio)++
+ " on socket"}
+ end;
+ {error,_} ->
+ {skip, "Can not set priority on socket"}
+ end.
+
+do_inet_dist_options_options(Prio) ->
+ PriorityString0 = "[{priority,"++integer_to_list(Prio)++"}]",
+ PriorityString =
+ case os:cmd("echo [{a,1}]") of
+ "[{a,1}]"++_ ->
+ PriorityString0;
+ _ ->
+ %% Some shells need quoting of [{}]
+ "'"++PriorityString0++"'"
+ end,
+ InetDistOptions =
+ "-hidden "
+ "-kernel inet_dist_connect_options "++PriorityString++" "
+ "-kernel inet_dist_listen_options "++PriorityString,
+ ?line {ok,Node1} =
+ start_node(inet_dist_options_1, InetDistOptions),
+ ?line {ok,Node2} =
+ start_node(inet_dist_options_2, InetDistOptions),
+ %%
+ ?line pong =
+ rpc:call(Node1, net_adm, ping, [Node2]),
+ ?line PrioritiesNode1 =
+ rpc:call(Node1, ?MODULE, get_socket_priorities, []),
+ ?line PrioritiesNode2 =
+ rpc:call(Node2, ?MODULE, get_socket_priorities, []),
+ ?line ?t:format("PrioritiesNode1 = ~p", [PrioritiesNode1]),
+ ?line ?t:format("PrioritiesNode2 = ~p", [PrioritiesNode2]),
+ ?line Elevated = [P || P <- PrioritiesNode1, P =:= Prio],
+ ?line Elevated = [P || P <- PrioritiesNode2, P =:= Prio],
+ ?line [_|_] = Elevated,
+ %%
+ ?line stop_node(Node2),
+ ?line stop_node(Node1),
+ ok.
+
+get_socket_priorities() ->
+ [Priority ||
+ {ok,[{priority,Priority}]} <-
+ [inet:getopts(Port, [priority]) ||
+ Port <- erlang:ports(),
+ element(2, erlang:port_info(Port, name)) =:= "tcp_inet"]].
+
+
%%
%% Testcase:
diff --git a/lib/kernel/test/file_SUITE.erl b/lib/kernel/test/file_SUITE.erl
index 2ce2303ba3..1213d8e37e 100644
--- a/lib/kernel/test/file_SUITE.erl
+++ b/lib/kernel/test/file_SUITE.erl
@@ -93,6 +93,8 @@
-export([old_io_protocol/1]).
+-export([unicode_mode/1]).
+
%% Debug exports
-export([create_file_slow/2, create_file/2, create_bin/2]).
-export([verify_file/2, verify_bin/3]).
@@ -105,6 +107,7 @@
-include_lib("test_server/include/test_server.hrl").
-include_lib("kernel/include/file.hrl").
+-define(THROW_ERROR(RES), throw({fail, ?LINE, RES})).
suite() -> [{ct_hooks,[ts_install_cth]}].
@@ -116,7 +119,9 @@ all() ->
delayed_write, read_ahead, segment_read, segment_write,
ipread, pid2name, interleaved_read_write, otp_5814, otp_10852,
large_file, large_write, read_line_1, read_line_2, read_line_3,
- read_line_4, standard_io, old_io_protocol].
+ read_line_4, standard_io, old_io_protocol,
+ unicode_mode
+ ].
groups() ->
[{dirs, [], [make_del_dir, cur_dir_0, cur_dir_1,
@@ -347,8 +352,153 @@ old_io_protocol(Config) when is_list(Config) ->
[] = flush(),
ok.
+unicode_mode(suite) -> [];
+unicode_mode(doc) -> [""];
+unicode_mode(Config) ->
+ Dir = {dir, ?config(priv_dir,Config)},
+ OptVariants = [[Dir],
+ [Dir, {encoding, utf8}],
+ [Dir, binary],
+ [Dir, binary, {encoding, utf8}]
+ ],
+ ReadVariants = [{read, fun(Fd) -> um_read(Fd, fun(Fd1) -> file:read(Fd1, 1024) end) end},
+ {read_line, fun(Fd) -> um_read(Fd, fun(Fd1) -> file:read_line(Fd1) end) end}
+ %%{pread, fun(Fd) -> file:pread(Fd, 0, 1024) end},
+ %%{preadl, fun(Fd) -> file:pread(Fd, [{0, 1024}]) end},
+ ],
+
+ _ = [read_write_0("ASCII: list: Hello World", Read, Opt) ||
+ Opt <- OptVariants, Read <- ReadVariants],
+ _ = [read_write_0("LATIN1: list: åäöÅÄÖ", Read, Opt) ||
+ Opt <- OptVariants, Read <- ReadVariants],
+ _ = [read_write_0(<<"ASCII: bin: Hello World">>, Read, Opt) ||
+ Opt <- OptVariants, Read <- ReadVariants],
+ _ = [read_write_0(<<"LATIN1: bin: åäöÅÄÖ">>, Read, Opt) ||
+ Opt <- OptVariants, Read <- ReadVariants],
+ %% These will be double encoded if option is encoding utf-8
+ _ = [read_write_0(<<"UTF8: bin: Ωß"/utf8>>, Read, Opt) ||
+ Opt <- OptVariants, Read <- ReadVariants],
+ %% These should not work (with encoding set to utf-8)
+ %% according to file's documentation
+ _ = [read_write_0("UTF8: list: Ωß", Read, Opt) ||
+ Opt <- OptVariants, Read <- ReadVariants],
+ ok.
+
+read_write_0(Str, {Func, ReadFun}, Options) ->
+ try
+ Res = read_write_1(Str, ReadFun, Options),
+ io:format("~p: ~ts ~p '~p'~n", [Func, Str, tl(Options), Res]),
+ ok
+ catch {fail, Line, ReadBytes = [_|_]} ->
+ io:format("~p:~p: ~p ERROR: ~w vs~n ~w~n - ~p~n",
+ [?MODULE, Line, Func, Str, ReadBytes, Options]),
+ exit({error, ?LINE});
+ {fail, Line, ReadBytes} ->
+ io:format("~p:~p: ~p ERROR: ~ts vs~n ~w~n - ~p~n",
+ [?MODULE, Line, Func, Str, ReadBytes, Options]),
+ exit({error, ?LINE});
+ error:What ->
+ io:format("~p:??: ~p ERROR: ~p from~n ~w~n ~p~n",
+ [?MODULE, Func, What, Str, Options]),
+
+ io:format("\t~p~n", [erlang:get_stacktrace()]),
+ exit({error, ?LINE})
+ end.
+
+read_write_1(Str0, ReadFun, [{dir,Dir}|Options]) ->
+ File = um_filename(Str0, Dir, Options),
+ Pre = "line 1\n", Post = "\nlast line\n",
+ Str = case is_list(Str0) andalso lists:max(Str0) > 255 of
+ false -> %% Normal case Use options
+ {ok, FdW} = file:open(File, [write|Options]),
+ IO = [Pre, Str0, Post],
+ ok = file:write(FdW, IO),
+ case is_binary(Str0) of
+ true -> iolist_to_binary(IO);
+ false -> lists:append(IO)
+ end;
+ true -> %% Test unicode lists
+ {ok, FdW} = file:open(File, [write]),
+ Utf8 = unicode:characters_to_binary([Pre, Str0, Post]),
+ file:write(FdW, Utf8),
+ {unicode, Utf8}
+ end,
+ file:close(FdW),
+ {ok, FdR} = file:open(File, [read|Options]),
+ ReadRes = ReadFun(FdR),
+ file:close(FdR),
+ Res = um_check(Str, ReadRes, Options),
+ file:delete(File),
+ Res.
+um_read(Fd, Fun) ->
+ um_read(Fd, Fun, []).
+
+um_read(Fd, Fun, Acc) ->
+ case Fun(Fd) of
+ eof ->
+ case is_binary(hd(Acc)) of
+ true -> {ok, iolist_to_binary(lists:reverse(Acc))};
+ false -> {ok, lists:append(lists:reverse(Acc))}
+ end;
+ {ok, Data} ->
+ um_read(Fd, Fun, [Data|Acc]);
+ Error ->
+ Error
+ end.
+
+
+um_check(Str, {ok, Str}, _) -> ok;
+um_check(Bin, {ok, Res}, _Options) when is_binary(Bin), is_list(Res) ->
+ case list_to_binary(Res) of
+ Bin -> ok;
+ _ -> ?THROW_ERROR(Res)
+ end;
+um_check(Str, {ok, Res}, _Options) when is_list(Str), is_binary(Res) ->
+ case iolist_to_binary(Str) of
+ Res -> ok;
+ _ -> ?THROW_ERROR(Res)
+ end;
+um_check({unicode, Utf8Bin}, Res, Options) ->
+ um_check_unicode(Utf8Bin, Res,
+ proplists:get_value(binary, Options, false),
+ proplists:get_value(encoding, Options, none));
+um_check(_Str, Res, _Options) ->
+ ?THROW_ERROR(Res).
+
+um_check_unicode(Utf8Bin, {ok, Utf8Bin}, true, none) ->
+ ok;
+um_check_unicode(Utf8Bin, {ok, List = [_|_]}, false, none) ->
+ case binary_to_list(Utf8Bin) == List of
+ true -> ok;
+ false -> ?THROW_ERROR(List)
+ end;
+um_check_unicode(_Utf8Bin, {error, {no_translation, unicode, latin1}}, _, _) ->
+ no_translation;
+um_check_unicode(_Utf8Bin, Error = {error, _}, _, _Unicode) ->
+ ?THROW_ERROR(Error);
+um_check_unicode(_Utf8Bin, {ok, _ListOrBin}, _, _UTF8_) ->
+ %% List = if is_binary(ListOrBin) -> unicode:characters_to_list(ListOrBin);
+ %% true -> ListOrBin
+ %% end,
+ %% io:format("In: ~w~n", [binary_to_list(Utf8Bin)]),
+ %% io:format("Ut: ~w~n", [List]),
+    ?THROW_ERROR({should_be, no_translation}).
+
+um_filename(Bin, Dir, Options) when is_binary(Bin) ->
+ um_filename(binary_to_list(Bin), Dir, Options);
+um_filename(Str = [_|_], Dir, Options) ->
+ Name = hd(string:tokens(Str, ":")),
+ Enc = atom_to_list(proplists:get_value(encoding, Options, latin1)),
+ File = case lists:member(binary, Options) of
+ true ->
+ "test_" ++ Name ++ "_bin_enc_" ++ Enc;
+ false ->
+ "test_" ++ Name ++ "_list_enc_" ++ Enc
+ end,
+ filename:join(Dir, File).
+
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
read_write_file(suite) -> [];
diff --git a/lib/kernel/test/inet_SUITE.erl b/lib/kernel/test/inet_SUITE.erl
index d45dfc2173..849013ac79 100644
--- a/lib/kernel/test/inet_SUITE.erl
+++ b/lib/kernel/test/inet_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -36,6 +36,7 @@
gethostnative_parallell/1, cname_loop/1,
gethostnative_soft_restart/0, gethostnative_soft_restart/1,
gethostnative_debug_level/0, gethostnative_debug_level/1,
+ lookup_bad_search_option/1,
getif/1,
getif_ifr_name_overflow/1,getservbyname_overflow/1, getifaddrs/1,
parse_strict_address/1, simple_netns/1, simple_netns_open/1]).
@@ -52,6 +53,7 @@ all() ->
ipv4_to_ipv6, host_and_addr, {group, parse},
t_gethostnative, gethostnative_parallell, cname_loop,
gethostnative_debug_level, gethostnative_soft_restart,
+ lookup_bad_search_option,
getif, getif_ifr_name_overflow, getservbyname_overflow,
getifaddrs, parse_strict_address, simple_netns, simple_netns_open].
@@ -908,6 +910,21 @@ lookup_loop([H|Hs], Delay, Tag, Parent, Cnt, Hosts) ->
+lookup_bad_search_option(suite) ->
+ [];
+lookup_bad_search_option(doc) ->
+ ["Test lookup with erroneously configured lookup option (OTP-12133)"];
+lookup_bad_search_option(Config) when is_list(Config) ->
+ Db = inet_db,
+ %% The bad option can not enter through inet_db:set_lookup/1,
+    %% The bad option cannot enter through inet_db:set_lookup/1,
+    %% but it can through e.g. an .inetrc file.
+ {ok,Hostname} = inet:gethostname(),
+    {ok,_Hent} = inet:gethostbyname(Hostname), % Would loop forever with this bug present
+ ok.
+
+
+
getif(suite) ->
[];
getif(doc) ->
diff --git a/lib/kernel/test/standard_error_SUITE.erl b/lib/kernel/test/standard_error_SUITE.erl
new file mode 100644
index 0000000000..b290454b40
--- /dev/null
+++ b/lib/kernel/test/standard_error_SUITE.erl
@@ -0,0 +1,38 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2014. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+-module(standard_error_SUITE).
+
+-export([all/0,suite/0]).
+-export([badarg/1,getopts/1]).
+
+suite() ->
+ [{ct_hooks,[ts_install_cth]}].
+
+all() ->
+ [badarg,getopts].
+
+badarg(Config) when is_list(Config) ->
+ {'EXIT',{badarg,_}} = (catch io:put_chars(standard_error, [oops])),
+ true = erlang:is_process_alive(whereis(standard_error)),
+ ok.
+
+getopts(Config) when is_list(Config) ->
+ [{encoding,latin1}] = io:getopts(standard_error),
+ ok.
diff --git a/lib/kernel/test/zlib_SUITE.erl b/lib/kernel/test/zlib_SUITE.erl
index 3be6f39d95..e99151284f 100644
--- a/lib/kernel/test/zlib_SUITE.erl
+++ b/lib/kernel/test/zlib_SUITE.erl
@@ -82,7 +82,7 @@ groups() ->
api_deflateSetDictionary, api_deflateReset,
api_deflateParams, api_deflate, api_deflateEnd,
api_inflateInit, api_inflateSetDictionary,
- api_inflateSync, api_inflateReset, api_inflate,
+ api_inflateSync, api_inflateReset, api_inflate, api_inflateChunk,
api_inflateEnd, api_setBufsz, api_getBufsz, api_crc32,
api_adler32, api_getQSize, api_un_compress, api_un_zip,
api_g_un_zip]},
@@ -357,6 +357,39 @@ api_inflate(Config) when is_list(Config) ->
?m({'EXIT',{data_error,_}}, zlib:inflate(Z1, <<2,1,2,1,2>>)),
?m(ok, zlib:close(Z1)).
+api_inflateChunk(doc) -> "Test inflateChunk";
+api_inflateChunk(suite) -> [];
+api_inflateChunk(Config) when is_list(Config) ->
+ ChunkSize = 1024,
+ Data = << <<(I rem 150)>> || I <- lists:seq(1, 3 * ChunkSize) >>,
+ Part1 = binary:part(Data, 0, ChunkSize),
+ Part2 = binary:part(Data, ChunkSize, ChunkSize),
+ Part3 = binary:part(Data, ChunkSize * 2, ChunkSize),
+ ?line Compressed = zlib:compress(Data),
+ ?line Z1 = zlib:open(),
+ ?line zlib:setBufSize(Z1, ChunkSize),
+ ?m(ok, zlib:inflateInit(Z1)),
+ ?m([], zlib:inflateChunk(Z1, <<>>)),
+ ?m({more, Part1}, zlib:inflateChunk(Z1, Compressed)),
+ ?m({more, Part2}, zlib:inflateChunk(Z1)),
+ ?m(Part3, zlib:inflateChunk(Z1)),
+ ?m(ok, zlib:inflateEnd(Z1)),
+
+ ?m(ok, zlib:inflateInit(Z1)),
+ ?m({more, Part1}, zlib:inflateChunk(Z1, Compressed)),
+
+ ?m(ok, zlib:inflateReset(Z1)),
+
+ ?line zlib:setBufSize(Z1, size(Data)),
+ ?m(Data, zlib:inflateChunk(Z1, Compressed)),
+ ?m(ok, zlib:inflateEnd(Z1)),
+
+ ?m(ok, zlib:inflateInit(Z1)),
+ ?m(?BARG, zlib:inflateChunk(gurka, Compressed)),
+ ?m(?BARG, zlib:inflateChunk(Z1, 4384)),
+ ?m({'EXIT',{data_error,_}}, zlib:inflateEnd(Z1)),
+ ?m(ok, zlib:close(Z1)).
+
api_inflateEnd(doc) -> "Test inflateEnd";
api_inflateEnd(suite) -> [];
api_inflateEnd(Config) when is_list(Config) ->
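
A hedged sketch of the chunk-wise API outside the test suite; the module and function names are hypothetical:

    -module(zchunk_example).
    -export([inflate_all/1]).

    %% Decompress Compressed in buffer-sized chunks, as the new test does.
    inflate_all(Compressed) ->
        Z = zlib:open(),
        ok = zlib:inflateInit(Z),
        Res = loop(Z, zlib:inflateChunk(Z, Compressed), []),
        ok = zlib:inflateEnd(Z),
        ok = zlib:close(Z),
        Res.

    loop(Z, {more, Part}, Acc) ->
        loop(Z, zlib:inflateChunk(Z), [Part | Acc]);
    loop(_Z, Last, Acc) ->
        iolist_to_binary(lists:reverse([Last | Acc])).
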
diff --git a/lib/mnesia/doc/src/Mnesia_chap5.xmlsrc b/lib/mnesia/doc/src/Mnesia_chap5.xmlsrc
index 65b950bd46..127c23e0f7 100644
--- a/lib/mnesia/doc/src/Mnesia_chap5.xmlsrc
+++ b/lib/mnesia/doc/src/Mnesia_chap5.xmlsrc
@@ -867,6 +867,7 @@ ok
</section>
<section>
+ <marker id="event_handling"></marker>
<title>Mnesia Event Handling</title>
<p>System events and table events are the two categories of events
that Mnesia will generate in various situations.
diff --git a/lib/mnesia/doc/src/mnesia.xml b/lib/mnesia/doc/src/mnesia.xml
index c23c2cb226..856a7594a7 100644
--- a/lib/mnesia/doc/src/mnesia.xml
+++ b/lib/mnesia/doc/src/mnesia.xml
@@ -863,7 +863,7 @@ mnesia:create_table(person,
{attributes, record_info(fields,person)}]).
</code>
<p>The specification of <c>index</c> and <c>attributes</c> may be
- hard coded as <c>{index, [2]}</c> and
+ hard coded as <c>{index, [4]}</c> and
<c>{attributes, [name, age, address, salary, children]}</c>
respectively.
</p>
@@ -2188,12 +2188,13 @@ mnesia:create_table(employee,
</desc>
</func>
<func>
- <name>subscribe(EventCategory)</name>
+ <name>subscribe(EventCategory) -> {ok, Node} | {error, Reason} </name>
<fsummary>Subscribe to events of type <c>EventCategory</c>.</fsummary>
<desc>
<p>Ensures that a copy of all events of type
<c>EventCategory</c> are sent to the caller. The event
- types available are described in the Mnesia User's Guide.</p>
+ types available are described in the Mnesia User's Guide at <seealso marker="Mnesia_chap5#event_handling">Mnesia Event Handling</seealso>.</p>
+        <p><c>Node</c> is the local node. To subscribe to table events, Mnesia must have a readable local copy of the table on the node.</p>
</desc>
</func>
<func>
@@ -2861,11 +2862,12 @@ raise(Name, Amount) ->
</desc>
</func>
<func>
- <name>unsubscribe(EventCategory)</name>
+ <name>unsubscribe(EventCategory) -> {ok, Node} | {error, Reason} </name>
<fsummary>Subscribe to events of type <c>EventCategory</c>.</fsummary>
<desc>
<p>Stops sending events of type
<c>EventCategory</c> to the caller.</p>
+ <p><c>Node</c> is the local node.</p>
</desc>
</func>
<func>
@@ -3017,6 +3019,12 @@ raise(Name, Amount) ->
totally unpredictable.</p>
</item>
<item>
+ <p><c>-mnesia dump_disc_copies_at_startup true | false</c>.
+ If set to false, this disables the dumping of <c>disc_copies</c>
+ tables during startup while tables are being loaded. The default
+ is true.</p>
+ </item>
+ <item>
<p><c>-mnesia dump_log_load_regulation true | false</c>.
Controls if the log dumps should be performed as fast as
possible or if the dumper should do its own load
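
A small sketch of the subscribe/unsubscribe return values documented above, for a table named person that is assumed to have a readable local copy:

    {ok, Node} = mnesia:subscribe({table, person, simple}),
    io:format("subscribed on ~p~n", [Node]),
    receive
        {mnesia_table_event, Event} ->
            io:format("table event: ~p~n", [Event])
    after 5000 ->
        no_event
    end,
    {ok, Node} = mnesia:unsubscribe({table, person, simple}).
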
diff --git a/lib/mnesia/src/mnesia_controller.erl b/lib/mnesia/src/mnesia_controller.erl
index 5a9bae54da..aa72de7594 100644
--- a/lib/mnesia/src/mnesia_controller.erl
+++ b/lib/mnesia/src/mnesia_controller.erl
@@ -51,6 +51,7 @@
force_load_table/1,
async_dump_log/1,
sync_dump_log/1,
+ snapshot_dcd/1,
connect_nodes/1,
connect_nodes/2,
wait_for_schema_commit_lock/0,
@@ -139,7 +140,8 @@ max_loaders() ->
-record(block_controller, {owner}).
-record(dump_log, {initiated_by,
- opt_reply_to
+ opt_reply_to,
+ operation = dump_log
}).
-record(net_load, {table,
@@ -201,6 +203,15 @@ async_dump_log(InitBy) ->
?SERVER_NAME ! {async_dump_log, InitBy},
ok.
+snapshot_dcd(Tables) when is_list(Tables) ->
+ case [T || T <- Tables,
+ mnesia_lib:storage_type_at_node(node(), T) =/= disc_copies] of
+ [] ->
+ call({snapshot_dcd, Tables});
+ BadTabs ->
+ {error, {not_disc_copies, BadTabs}}
+ end.
+
%% Wait for tables to be active
%% If needed, we will wait for Mnesia to start
%% If Mnesia stops, we will wait for Mnesia to restart
@@ -646,6 +657,15 @@ handle_call({sync_dump_log, InitBy}, From, State) ->
State2 = add_worker(Worker, State),
noreply(State2);
+handle_call({snapshot_dcd, Tables}, From, State) ->
+ Worker = #dump_log{initiated_by = user,
+ opt_reply_to = From,
+ operation = fun() ->
+ mnesia_dumper:snapshot_dcd(Tables)
+ end},
+ State2 = add_worker(Worker, State),
+ noreply(State2);
+
handle_call(wait_for_schema_commit_lock, From, State) ->
Worker = #schema_commit_lock{owner = From},
State2 = add_worker(Worker, State),
@@ -2089,7 +2109,12 @@ start_remote_sender(Node, Tab, Receiver, Storage) ->
dump_and_reply(ReplyTo, Worker) ->
%% No trap_exit, die intentionally instead
- Res = mnesia_dumper:opt_dump_log(Worker#dump_log.initiated_by),
+ Res = case Worker#dump_log.operation of
+ dump_log ->
+ mnesia_dumper:opt_dump_log(Worker#dump_log.initiated_by);
+ F when is_function(F, 0) ->
+ F()
+ end,
ReplyTo ! #dumper_done{worker_pid = self(),
worker_res = Res},
unlink(ReplyTo),
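
A hedged sketch of calling the new snapshot entry point directly; my_table is a placeholder and must be a disc_copies table on the local node:

    case mnesia_controller:snapshot_dcd([my_table]) of
        {error, {not_disc_copies, BadTabs}} ->
            {skipped, BadTabs};
        Result ->
            %% The dump worker runs mnesia_dumper:snapshot_dcd/1 and replies
            %% once the table copy has been written to its .DCD file.
            Result
    end.
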
diff --git a/lib/mnesia/src/mnesia_dumper.erl b/lib/mnesia/src/mnesia_dumper.erl
index 14665797a0..509b765dee 100644
--- a/lib/mnesia/src/mnesia_dumper.erl
+++ b/lib/mnesia/src/mnesia_dumper.erl
@@ -34,11 +34,13 @@
-export([
get_log_writes/0,
incr_log_writes/0,
+ needs_dump_ets/1,
raw_dump_table/2,
raw_named_dump_table/2,
start_regulator/0,
opt_dump_log/1,
- update/3
+ update/3,
+ snapshot_dcd/1
]).
%% Internal stuff
@@ -99,6 +101,19 @@ opt_dump_log(InitBy) ->
end,
perform_dump(InitBy, Reg).
+snapshot_dcd(Tables) ->
+ lists:foreach(
+ fun(Tab) ->
+ case mnesia_lib:storage_type_at_node(node(), Tab) of
+ disc_copies ->
+ mnesia_log:ets2dcd(Tab);
+ _ ->
+ %% Storage type was checked before queueing the op, though
+ skip
+ end
+ end, Tables),
+ dumped.
+
%% Scan for decisions
perform_dump(InitBy, Regulator) when InitBy == scan_decisions ->
?eval_debug_fun({?MODULE, perform_dump}, [InitBy]),
@@ -981,28 +996,10 @@ open_files(_Tab, _Storage, _UpdateInPlace, _InitBy) ->
false.
open_disc_copies(Tab, InitBy) ->
- DclF = mnesia_lib:tab2dcl(Tab),
- DumpEts =
- case file:read_file_info(DclF) of
- {error, enoent} ->
- false;
- {ok, DclInfo} ->
- DcdF = mnesia_lib:tab2dcd(Tab),
- case file:read_file_info(DcdF) of
- {error, Reason} ->
- mnesia_lib:dbg_out("File ~p info_error ~p ~n",
- [DcdF, Reason]),
- true;
- {ok, DcdInfo} ->
- Mul = case ?catch_val(dc_dump_limit) of
- {'EXIT', _} -> ?DumpToEtsMultiplier;
- Val -> Val
- end,
- DcdInfo#file_info.size =< (DclInfo#file_info.size * Mul)
- end
- end,
+ DumpEts = needs_dump_ets(Tab),
if
DumpEts == false; InitBy == startup ->
+ DclF = mnesia_lib:tab2dcl(Tab),
mnesia_log:open_log({?MODULE,Tab},
mnesia_log:dcl_log_header(),
DclF,
@@ -1017,6 +1014,27 @@ open_disc_copies(Tab, InitBy) ->
false
end.
+needs_dump_ets(Tab) ->
+ DclF = mnesia_lib:tab2dcl(Tab),
+ case file:read_file_info(DclF) of
+ {error, enoent} ->
+ false;
+ {ok, DclInfo} ->
+ DcdF = mnesia_lib:tab2dcd(Tab),
+ case file:read_file_info(DcdF) of
+ {error, Reason} ->
+ mnesia_lib:dbg_out("File ~p info_error ~p ~n",
+ [DcdF, Reason]),
+ true;
+ {ok, DcdInfo} ->
+ Mul = case ?catch_val(dc_dump_limit) of
+ {'EXIT', _} -> ?DumpToEtsMultiplier;
+ Val -> Val
+ end,
+ DcdInfo#file_info.size =< (DclInfo#file_info.size * Mul)
+ end
+ end.
+
%% Always opens the dcl file for writing overriding already_dumped
%% mechanismen, used for schema transactions.
open_dcl(Tab) ->
diff --git a/lib/mnesia/src/mnesia_loader.erl b/lib/mnesia/src/mnesia_loader.erl
index 530317bcdd..cbb3d7e430 100644
--- a/lib/mnesia/src/mnesia_loader.erl
+++ b/lib/mnesia/src/mnesia_loader.erl
@@ -69,9 +69,10 @@ do_get_disc_copy2(Tab, Reason, Storage, Type) when Storage == disc_copies ->
ignore;
_ ->
mnesia_monitor:mktab(Tab, Args),
- Count = mnesia_log:dcd2ets(Tab, Repair),
- case ets:info(Tab, size) of
- X when X < Count * 4 ->
+ _Count = mnesia_log:dcd2ets(Tab, Repair),
+ case mnesia_monitor:get_env(dump_disc_copies_at_startup)
+ andalso mnesia_dumper:needs_dump_ets(Tab) of
+ true ->
ok = mnesia_log:ets2dcd(Tab);
_ ->
ignore
diff --git a/lib/mnesia/src/mnesia_locker.erl b/lib/mnesia/src/mnesia_locker.erl
index e27396731f..1efb939e00 100644
--- a/lib/mnesia/src/mnesia_locker.erl
+++ b/lib/mnesia/src/mnesia_locker.erl
@@ -982,8 +982,14 @@ sticky_flush(Ns=[Node | Tail], Store) ->
flush_remaining([], _SkipNode, Res) ->
del_debug(),
exit(Res);
-flush_remaining([SkipNode | Tail ], SkipNode, Res) ->
- flush_remaining(Tail, SkipNode, Res);
+flush_remaining(Ns=[SkipNode | Tail ], SkipNode, Res) ->
+ add_debug(Ns),
+ receive
+ {?MODULE, SkipNode, _} ->
+ flush_remaining(Tail, SkipNode, Res)
+ after 0 ->
+ flush_remaining(Tail, SkipNode, Res)
+ end;
flush_remaining(Ns=[Node | Tail], SkipNode, Res) ->
add_debug(Ns),
receive
diff --git a/lib/mnesia/src/mnesia_monitor.erl b/lib/mnesia/src/mnesia_monitor.erl
index 6fc1a394a6..a0e0e630ec 100644
--- a/lib/mnesia/src/mnesia_monitor.erl
+++ b/lib/mnesia/src/mnesia_monitor.erl
@@ -664,6 +664,7 @@ env() ->
backup_module,
debug,
dir,
+ dump_disc_copies_at_startup,
dump_log_load_regulation,
dump_log_time_threshold,
dump_log_update_in_place,
@@ -692,6 +693,8 @@ default_env(debug) ->
default_env(dir) ->
Name = lists:concat(["Mnesia.", node()]),
filename:absname(Name);
+default_env(dump_disc_copies_at_startup) ->
+ true;
default_env(dump_log_load_regulation) ->
false;
default_env(dump_log_time_threshold) ->
@@ -741,6 +744,7 @@ do_check_type(debug, trace) -> trace;
do_check_type(debug, true) -> debug;
do_check_type(debug, verbose) -> verbose;
do_check_type(dir, V) -> filename:absname(V);
+do_check_type(dump_disc_copies_at_startup, B) -> bool(B);
do_check_type(dump_log_load_regulation, B) -> bool(B);
do_check_type(dump_log_time_threshold, I) when is_integer(I), I > 0 -> I;
do_check_type(dump_log_update_in_place, B) -> bool(B);
diff --git a/lib/mnesia/src/mnesia_recover.erl b/lib/mnesia/src/mnesia_recover.erl
index b6492707e2..eeb4fa0ced 100644
--- a/lib/mnesia/src/mnesia_recover.erl
+++ b/lib/mnesia/src/mnesia_recover.erl
@@ -689,12 +689,29 @@ handle_call({connect_nodes, Ns}, From, State) ->
%% called from handle_info
gen_server:reply(From, {[], AlreadyConnected}),
{noreply, State};
- GoodNodes ->
+ ProbablyGoodNodes ->
%% Now we have agreed upon a protocol with some new nodes
- %% and we may use them when we recover transactions
+ %% and we may use them when we recover transactions.
+ %%
+ %% Just in case Mnesia was stopped on some of those nodes
+	    %% between the protocol negotiation and now, we check the
+	    %% state of Mnesia one more time.
+ %%
+ %% Of course, there is still a chance that mnesia_down
+ %% events occur during this check and we miss them. To
+ %% prevent it, handle_cast({mnesia_down, ...}, ...) removes
+ %% the down node again, in addition to mnesia_down/1.
+ %%
+ %% See a comment in handle_cast({mnesia_down, ...}, ...).
+ Verify = fun(N) ->
+ Run = mnesia_lib:is_running(N),
+ Run =:= yes orelse Run =:= starting
+ end,
+ GoodNodes = [N || N <- ProbablyGoodNodes, Verify(N)],
+
mnesia_lib:add_list(recover_nodes, GoodNodes),
cast({announce_all, GoodNodes}),
- case get_master_nodes(schema) of
+ case get_master_nodes(schema) of
[] ->
Context = starting_partitioned_network,
mnesia_monitor:detect_inconcistency(GoodNodes, Context);
@@ -842,6 +859,14 @@ handle_cast({what_decision, Node, OtherD}, State) ->
{noreply, State};
handle_cast({mnesia_down, Node}, State) ->
+ %% The node was already removed from recover_nodes in mnesia_down/1,
+ %% but we do it again here in the mnesia_recover process, in case
+ %% another event incorrectly added it back. This can happen during
+    %% Mnesia startup, which takes time between the connection, the
+ %% protocol negotiation and the merge of the schema.
+ %%
+ %% See a comment in handle_call({connect_nodes, ...), ...).
+ mnesia_lib:del(recover_nodes, Node),
case State#state.unclear_decision of
undefined ->
{noreply, State};
diff --git a/lib/mnesia/test/mnesia_recovery_test.erl b/lib/mnesia/test/mnesia_recovery_test.erl
index 0d0ad32fb0..946a9f97ba 100644
--- a/lib/mnesia/test/mnesia_recovery_test.erl
+++ b/lib/mnesia/test/mnesia_recovery_test.erl
@@ -320,7 +320,9 @@ read_during_down(Op, Config) when is_list(Config) ->
?log("W2R ~p~n", [W2R]),
loop_and_kill_mnesia(10, hd(W2R), Tabs),
[Pid ! self() || Pid <- Readers],
- ?match([ok, ok, ok], [receive ok -> ok after 1000 -> {Pid, mnesia_lib:dist_coredump()} end || Pid <- Readers]),
+ ?match([ok, ok, ok],
+ [receive ok -> ok after 5000 -> {Pid, mnesia_lib:dist_coredump()} end
+ || Pid <- Readers]),
?verify_mnesia(Ns, []).
reader(Tab, OP) ->
@@ -338,8 +340,12 @@ reader(Tab, OP) ->
?error("Expected ~p Got ~p ~n", [[{Tab, key, val}], Else]),
erlang:error(test_failed)
end,
- receive Pid ->
- Pid ! ok
+ receive
+ Pid when is_pid(Pid) ->
+ Pid ! ok;
+ Other ->
+ io:format("Msg: ~p~n", [Other]),
+ error(Other)
after 50 ->
reader(Tab, OP)
end.
@@ -1537,6 +1543,7 @@ disc_less(Config) when is_list(Config) ->
timer:sleep(500),
?match(ok, rpc:call(Node3, mnesia, start, [[{extra_db_nodes, [Node1, Node2]}]])),
?match(ok, rpc:call(Node3, mnesia, wait_for_tables, [[Tab1, Tab2, Tab3], 20000])),
+ ?match(ok, rpc:call(Node1, mnesia, wait_for_tables, [[Tab1, Tab2, Tab3], 20000])),
?match(ok, rpc:call(Node3, ?MODULE, verify_data, [Tab1, 100])),
?match(ok, rpc:call(Node3, ?MODULE, verify_data, [Tab2, 100])),
diff --git a/lib/mnesia/test/mnesia_test_lib.hrl b/lib/mnesia/test/mnesia_test_lib.hrl
index 94a195f01f..cd76377df6 100644
--- a/lib/mnesia/test/mnesia_test_lib.hrl
+++ b/lib/mnesia/test/mnesia_test_lib.hrl
@@ -66,12 +66,14 @@
?verbose("ok, ~n Result as expected:~p~n",[_AR_2]),
{success,_AR_2};
_AR_2 ->
- ?error("Not Matching Actual result was:~n ~p~n", [_AR_2]),
+ ?error("Not Matching Actual result was:~n ~p~n ~p~n",
+ [_AR_2, erlang:get_stacktrace()]),
{fail,_AR_2}
end;
- _:_AR_1 ->
- ?error("Not Matching Actual result was:~n ~p~n", [_AR_1]),
- {fail,_AR_1}
+ _T1_:_AR_1 ->
+ ?error("Not Matching Actual result was:~n ~p~n ~p~n",
+ [{_T1_,_AR_1}, erlang:get_stacktrace()]),
+ {fail,{_T1_,_AR_1}}
end
end()).
diff --git a/lib/observer/doc/src/observer_ug.xml b/lib/observer/doc/src/observer_ug.xml
index 62f99c5210..fcb42f6c31 100644
--- a/lib/observer/doc/src/observer_ug.xml
+++ b/lib/observer/doc/src/observer_ug.xml
@@ -104,6 +104,29 @@
<note>
<p><em>Reds</em> can be presented as accumulated values or as values since last update.</p>
</note>
+      <p><c>Process info</c> opens a detailed information window on the selected process.
+ <taglist>
+ <tag>Process Information</tag>
+ <item>Shows the process information.</item>
+ <tag>Messages</tag>
+ <item>Shows the process messages.</item>
+ <tag>Dictionary</tag>
+ <item>Shows the process dictionary.</item>
+ <tag>Stack Trace</tag>
+        <item>Shows the current stack trace of the process.</item>
+        <tag>State</tag>
+        <item>Shows the process state.</item>
+        <tag>Log</tag>
+        <item>If enabled and available, shows the SASL log entries for the process.</item>
+ </taglist>
+ <note>
+        <p><c>Log</c> requires the SASL application to be started on the observed node, with <c>log_mf_h</c> as log handler.
+          The observed node must be R16B02 or higher.
+          The <c>rb</c> server must not be started on the observed node when clicking the menu entry 'Log/Toggle log view'.
+          The <c>rb</c> server will be stopped on the observed node when exiting or when changing the observed node.
+ </p>
+ </note>
+ </p>
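
As a hedged illustration of the requirements stated in the note above (not part of the patch; the directory and size limits are placeholder values), the observed node could be prepared for the Log tab roughly as follows:

    %% Configure SASL to log through the log_mf_h handler, then start it.
    application:load(sasl),
    application:set_env(sasl, error_logger_mf_dir, "/tmp/observer_logs"),
    application:set_env(sasl, error_logger_mf_maxbytes, 100000),
    application:set_env(sasl, error_logger_mf_maxfiles, 5),
    application:start(sasl).

With this in place, 'Log/Toggle log view' can start an rb server on the node and the per-process Log tab becomes available.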
<p><c>Trace Processes</c> will add the selected process identifiers to the <c>Trace Overview</c> view and the
node the processes reside on will be added as well.
<c>Trace Named Processes</c> will add the registered name of processes. This can be useful
diff --git a/lib/observer/src/cdv_proc_cb.erl b/lib/observer/src/cdv_proc_cb.erl
index dfc2df9c4c..d1549f79eb 100644
--- a/lib/observer/src/cdv_proc_cb.erl
+++ b/lib/observer/src/cdv_proc_cb.erl
@@ -129,6 +129,7 @@ info_fields() ->
{"Started", start_time},
{"Parent", {click,parent}},
{"Message Queue Len",msg_q_len},
+ {"Run queue", run_queue},
{"Reductions", reds},
{"Program counter", prog_count},
{"Continuation pointer",cp},
diff --git a/lib/observer/src/crashdump_viewer.erl b/lib/observer/src/crashdump_viewer.erl
index 99329b94e2..ef14ba46e2 100644
--- a/lib/observer/src/crashdump_viewer.erl
+++ b/lib/observer/src/crashdump_viewer.erl
@@ -320,6 +320,8 @@ handle_call(general_info,_From,State=#state{file=File}) ->
"Some information might be missing."];
false -> []
end,
+ ets:insert(cdv_reg_proc_table,
+ {cdv_dump_node_name,GenInfo#general_info.node_name}),
{reply,{ok,GenInfo,TW},State#state{wordsize=WS, num_atoms=NumAtoms}};
handle_call({expand_binary,{Offset,Size,Pos}},_From,State=#state{file=File}) ->
Fd = open(File),
@@ -926,7 +928,7 @@ general_info(File) ->
N;
[] ->
case lookup_index(?no_distribution) of
- [_] -> "nonode@nohost";
+ [_] -> "'nonode@nohost'";
[] -> "unknown"
end
end,
@@ -1131,6 +1133,8 @@ all_procinfo(Fd,Fun,Proc,WS,LineHead) ->
"arity = " ++ Arity ->
%%! Temporary workaround
get_procinfo(Fd,Fun,Proc#proc{arity=Arity--"\r\n"},WS);
+ "Run queue" ->
+ get_procinfo(Fd,Fun,Proc#proc{run_queue=val(Fd)},WS);
"=" ++ _next_tag ->
Proc;
Other ->
@@ -1165,6 +1169,19 @@ parse_pid(Str) ->
{Pid,Rest} = parse_link(Str,[]),
{{Pid,Pid},Rest}.
+parse_monitor("{"++Str) ->
+ %% Named process
+ {Name,Node,Rest1} = parse_name_node(Str,[]),
+ Pid = get_pid_from_name(Name,Node),
+ case parse_link(string:strip(Rest1,left,$,),[]) of
+ {Ref,"}"++Rest2} ->
+ %% Bug in break.c - prints an extra "}" for remote
+ %% nodes... thus the strip
+ {{Pid,"{"++Name++","++Node++"} ("++Ref++")"},
+ string:strip(Rest2,left,$})};
+ {Ref,[]} ->
+ {{Pid,"{"++Name++","++Node++"} ("++Ref++")"},[]}
+ end;
parse_monitor(Str) ->
case parse_link(Str,[]) of
{Pid,","++Rest1} ->
@@ -1186,6 +1203,35 @@ parse_link([],Acc) ->
%% truncated
{lists:reverse(Acc),[]}.
+parse_name_node(","++Rest,Name) ->
+ parse_name_node(Rest,Name,[]);
+parse_name_node([H|T],Name) ->
+ parse_name_node(T,[H|Name]);
+parse_name_node([],Name) ->
+ %% truncated
+ {lists:reverse(Name),[],[]}.
+
+parse_name_node("}"++Rest,Name,Node) ->
+ {lists:reverse(Name),lists:reverse(Node),Rest};
+parse_name_node([H|T],Name,Node) ->
+ parse_name_node(T,Name,[H|Node]);
+parse_name_node([],Name,Node) ->
+ %% truncated
+ {lists:reverse(Name),lists:reverse(Node),[]}.
+
+get_pid_from_name(Name,Node) ->
+ case ets:lookup(cdv_reg_proc_table,cdv_dump_node_name) of
+ [{_,Node}] ->
+ case ets:lookup(cdv_reg_proc_table,Name) of
+ [{_,Pid}] when is_pid(Pid) ->
+ pid_to_list(Pid);
+ _ ->
+		    "<unknown_pid>"
+ end;
+ _ ->
+ "<unknown_pid_other_node>"
+ end.
+
maybe_other_node(Id) ->
Channel =
case split($.,Id) of
diff --git a/lib/observer/src/crashdump_viewer.hrl b/lib/observer/src/crashdump_viewer.hrl
index 0e2eba6dee..47705d0da7 100644
--- a/lib/observer/src/crashdump_viewer.hrl
+++ b/lib/observer/src/crashdump_viewer.hrl
@@ -85,7 +85,9 @@
old_heap_top,
old_heap_end,
memory,
- stack_dump}).
+ stack_dump,
+ run_queue=?unknown
+ }).
-record(port,
{id,
diff --git a/lib/observer/src/observer_html_lib.erl b/lib/observer/src/observer_html_lib.erl
index c279218707..53197078cf 100644
--- a/lib/observer/src/observer_html_lib.erl
+++ b/lib/observer/src/observer_html_lib.erl
@@ -60,7 +60,8 @@ expandable_term_body(Heading,[],_Tab) ->
"StackDump" -> "No stack dump was found";
"Dictionary" -> "No dictionary was found";
"ProcState" -> "Information could not be retrieved,"
- " system messages may not be handled by this process."
+ " system messages may not be handled by this process.";
+ "SaslLog" -> "No log entry was found"
end];
expandable_term_body(Heading,Expanded,Tab) ->
Attr = "BORDER=0 CELLPADDING=0 CELLSPACING=1 WIDTH=100%",
@@ -102,7 +103,10 @@ expandable_term_body(Heading,Expanded,Tab) ->
element(1, lists:mapfoldl(fun(Entry, Even) ->
{proc_state(Tab, Entry,Even),
not Even}
- end, true, Expanded))]);
+ end, true, Expanded))]);
+ "SaslLog" ->
+ table(Attr,
+ [tr("BGCOLOR=white",[td("ALIGN=left", pre(href_proc_port(Expanded)))])]) ;
_ ->
table(Attr,
[tr(
diff --git a/lib/observer/src/observer_pro_wx.erl b/lib/observer/src/observer_pro_wx.erl
index 0be8c18893..026693ff56 100644
--- a/lib/observer/src/observer_pro_wx.erl
+++ b/lib/observer/src/observer_pro_wx.erl
@@ -578,7 +578,7 @@ get_row(From, Row, pid, Info) ->
end,
From ! {self(), Pid};
get_row(From, Row, Col, Info) ->
- Data = case Row > array:size(Info) of
+ Data = case Row >= array:size(Info) of
true ->
"";
false ->
diff --git a/lib/observer/src/observer_procinfo.erl b/lib/observer/src/observer_procinfo.erl
index 8e8a37fc93..2a840dc49e 100644
--- a/lib/observer/src/observer_procinfo.erl
+++ b/lib/observer/src/observer_procinfo.erl
@@ -43,6 +43,8 @@
-record(worker, {panel, callback}).
+-record(io, {rdata=""}).
+
start(Process, ParentFrame, Parent) ->
wx_object:start_link(?MODULE, [Process, ParentFrame, Parent], []).
@@ -69,6 +71,10 @@ init([Pid, ParentFrame, Parent]) ->
DictPage = init_panel(Notebook, "Dictionary", [Pid,Table], fun init_dict_page/3),
StackPage = init_panel(Notebook, "Stack Trace", [Pid], fun init_stack_page/2),
StatePage = init_panel(Notebook, "State", [Pid,Table], fun init_state_page/3),
+ Ps = case gen_server:call(observer, log_status) of
+ true -> [init_panel(Notebook, "Log", [Pid,Table], fun init_log_page/3)];
+ false -> []
+ end,
wxFrame:connect(Frame, close_window),
wxMenu:connect(Frame, command_menu_selected),
@@ -78,7 +84,7 @@ init([Pid, ParentFrame, Parent]) ->
pid=Pid,
frame=Frame,
notebook=Notebook,
- pages=[ProcessPage,MessagePage,DictPage,StackPage,StatePage],
+ pages=[ProcessPage,MessagePage,DictPage,StackPage,StatePage|Ps],
expand_table=Table
}}
catch error:{badrpc, _} ->
@@ -327,6 +333,26 @@ fetch_state_info2(Pid, M) ->
{badrpc,{'EXIT',{timeout, _}}} -> []
end.
+init_log_page(Parent, Pid, Table) ->
+ Win = observer_lib:html_window(Parent),
+ Update = fun() ->
+ Fd = spawn_link(fun() -> io_server() end),
+ rpc:call(node(Pid), rb, rescan, [[{start_log, Fd}]]),
+ rpc:call(node(Pid), rb, grep, [local_pid_str(Pid)]),
+ Logs = io_get_data(Fd),
+ %% Replace remote local pid notation to global notation
+		 %% Replace the remote node's local pid notation with the global notation
+ ExpPid = re:replace(Logs,"<0\.","<" ++ Pref ++ ".",[global, {return, list}]),
+ %% Try to keep same look by removing blanks at right of rewritten PID
+		 %% Try to keep the same look by removing blanks to the right of the rewritten pid
+ Re = "(<" ++ Pref ++ "\.[^>]{1,}>)[ ]{"++ integer_to_list(NbBlanks) ++ "}",
+ Look = re:replace(ExpPid, Re, "\\1", [global, {return, list}]),
+ Html = observer_html_lib:expandable_term("SaslLog", Look, Table),
+ wxHtmlWindow:setPage(Win, Html)
+ end,
+ Update(),
+ {Win, Update}.
+
create_menus(MenuBar) ->
Menus = [{"File", [#create_menu{id=?wxID_CLOSE, text="Close"}]},
{"View", [#create_menu{id=?REFRESH, text="Refresh\tCtrl-R"}]}],
@@ -409,3 +435,51 @@ filter_monitor_info() ->
Ms = proplists:get_value(monitors, Data),
[Pid || {process, Pid} <- Ms]
end.
+
+local_pid_str(Pid) ->
+    %% Observer can observe remote nodes, but there is no function to
+    %% get the local pid from the remote pid, so grep would fail to
+    %% find the remote pid in the remote node's local log:
+    %% <4589.42.1> will not be found, but <0.42.1> will.
+    %% Therefore, replace the first integer with zero.
+ "<0" ++ re:replace(pid_to_list(Pid),"\<([0-9]{1,})","",[{return, list}]).
+
+global_pid_node_pref(Pid) ->
+ %% Global PID node prefix : X of <X.Y.Z>
+ string:strip(string:sub_word(pid_to_list(Pid),1,$.),left,$<).
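
A hedged illustration of what the two helpers above return (the pid value is hypothetical):

    %% For a remote pid rendered as "<4589.42.1>" on the observer node:
    %%   global_pid_node_pref(Pid) -> "4589"
    %%   local_pid_str(Pid)        -> "<0.42.1>"
    %% The node part is reset to 0 so that rb:grep/1 can match the pid as
    %% it appears in the remote node's own log.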
+
+
+io_get_data(Pid) ->
+ Pid ! {self(), get_data_and_close},
+ receive
+ {Pid, data, Data} -> lists:flatten(Data)
+ end.
+
+io_server() ->
+ io_server(#io{}).
+
+io_server(State) ->
+ receive
+ {io_request, From, ReplyAs, Request} ->
+ {_, Reply, NewState} = io_request(Request,State),
+ From ! {io_reply, ReplyAs, Reply},
+ io_server(NewState);
+ {Pid, get_data_and_close} ->
+ Pid ! {self(), data, lists:reverse(State#io.rdata)},
+ normal;
+ _Unknown ->
+ io_server(State)
+ end.
+
+io_request({put_chars, _Encoding, Chars}, State = #io{rdata=Data}) ->
+ {ok, ok, State#io{rdata=[Chars|Data]}};
+io_request({put_chars, Encoding, Module, Function, Args}, State) ->
+ try
+ io_request({put_chars, Encoding, apply(Module, Function, Args)}, State)
+ catch _:_ ->
+ {error, {error, Function}, State}
+ end;
+io_request(_Req, State) ->
+ %% io:format("~p: Unknown req: ~p ~n",[?LINE, _Req]),
+ {ok, {error, request}, State}.
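
A hedged usage sketch of the minimal I/O-protocol server above (not part of the patch; it uses only the functions defined in this hunk and the standard io module):

    %% Start the collector, print to it via the Erlang I/O protocol,
    %% then fetch everything it buffered.
    Fd = spawn_link(fun() -> io_server() end),
    io:format(Fd, "hello ~p~n", [world]),
    "hello world\n" = io_get_data(Fd).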
diff --git a/lib/observer/src/observer_wx.erl b/lib/observer/src/observer_wx.erl
index c86f5ea916..54c4092a78 100644
--- a/lib/observer/src/observer_wx.erl
+++ b/lib/observer/src/observer_wx.erl
@@ -37,6 +37,7 @@
-define(ID_CONNECT, 2).
-define(ID_NOTEBOOK, 3).
-define(ID_CDV, 4).
+-define(ID_LOGVIEW, 5).
-define(FIRST_NODES_MENU_ID, 1000).
-define(LAST_NODES_MENU_ID, 2000).
@@ -60,7 +61,8 @@
active_tab,
node,
nodes,
- prev_node=""
+ prev_node="",
+ log = false
}).
start() ->
@@ -215,14 +217,17 @@ handle_event(#wx{event=#wxNotebook{type=command_notebook_page_changing}},
{noreply, State#state{active_tab=Pid}}
end;
-handle_event(#wx{event = #wxClose{}}, State) ->
- {stop, normal, State};
-
handle_event(#wx{id = ?ID_CDV, event = #wxCommand{type = command_menu_selected}}, State) ->
spawn(crashdump_viewer, start, []),
{noreply, State};
-handle_event(#wx{id = ?wxID_EXIT, event = #wxCommand{type = command_menu_selected}}, State) ->
+handle_event(#wx{event = #wxClose{}}, #state{log=LogOn} = State) ->
+ LogOn andalso rpc:block_call(State#state.node, rb, stop, []),
+ {stop, normal, State};
+
+handle_event(#wx{id = ?wxID_EXIT, event = #wxCommand{type = command_menu_selected}},
+ #state{log=LogOn} = State) ->
+ LogOn andalso rpc:block_call(State#state.node, rb, stop, []),
{stop, normal, State};
handle_event(#wx{id = ?wxID_HELP, event = #wxCommand{type = command_menu_selected}}, State) ->
@@ -300,12 +305,42 @@ handle_event(#wx{id = ?ID_PING, event = #wxCommand{type = command_menu_selected}
end,
{noreply, UpdState};
-handle_event(#wx{id = Id, event = #wxCommand{type = command_menu_selected}}, State)
- when Id > ?FIRST_NODES_MENU_ID, Id < ?LAST_NODES_MENU_ID ->
+handle_event(#wx{id = ?ID_LOGVIEW, event = #wxCommand{type = command_menu_selected}},
+ #state{frame = Frame, log = PrevLog, node = Node} = State) ->
+ try
+ ok = ensure_sasl_started(Node),
+ ok = ensure_mf_h_handler_used(Node),
+ ok = ensure_rb_mode(Node, PrevLog),
+ case PrevLog of
+ false ->
+ rpc:block_call(Node, rb, start, []),
+ set_status("Observer - " ++ atom_to_list(Node) ++ " (rb_server started)"),
+ {noreply, State#state{log=true}};
+ true ->
+ rpc:block_call(Node, rb, stop, []),
+ set_status("Observer - " ++ atom_to_list(Node) ++ " (rb_server stopped)"),
+ {noreply, State#state{log=false}}
+ end
+ catch
+ throw:Reason ->
+ create_txt_dialog(Frame, Reason, "Log view status", ?wxICON_ERROR),
+ {noreply, State}
+ end;
- Node = lists:nth(Id - ?FIRST_NODES_MENU_ID, State#state.nodes),
- UpdState = change_node_view(Node, State),
- {noreply, UpdState};
+handle_event(#wx{id = Id, event = #wxCommand{type = command_menu_selected}},
+ #state{nodes= Ns , node = PrevNode, log = PrevLog} = State)
+ when Id > ?FIRST_NODES_MENU_ID, Id < ?LAST_NODES_MENU_ID ->
+ Node = lists:nth(Id - ?FIRST_NODES_MENU_ID, Ns),
+    %% Close rb_server only if a node other than the current one is selected
+ LState = case PrevLog of
+ true -> case Node == PrevNode of
+ false -> rpc:block_call(PrevNode, rb, stop, []),
+ State#state{log=false} ;
+ true -> State
+ end;
+ false -> State
+ end,
+ {noreply, change_node_view(Node, LState)};
handle_event(Event, State) ->
Pid = get_active_pid(State),
@@ -340,6 +375,9 @@ handle_call(stop, _, State = #state{frame = Frame}) ->
wxFrame:destroy(Frame),
{stop, normal, ok, State};
+handle_call(log_status, _From, State) ->
+ {reply, State#state.log, State};
+
handle_call(_Msg, _From, State) ->
{reply, ok, State}.
@@ -422,8 +460,7 @@ return_to_localnode(Frame, Node) ->
end.
create_txt_dialog(Frame, Msg, Title, Style) ->
- MD = wxMessageDialog:new(Frame, Msg, [{style, Style}]),
- wxMessageDialog:setTitle(MD, Title),
+ MD = wxMessageDialog:new(Frame, Msg, [{style, Style}, {caption,Title}]),
wxDialog:showModal(MD),
wxDialog:destroy(MD).
@@ -569,17 +606,19 @@ default_menus(NodesMenuItems) ->
false -> {"Nodes", NodesMenuItems ++
[#create_menu{id = ?ID_CONNECT, text = "Enable distribution"}]}
end,
+ LogMenu = {"Log", [#create_menu{id = ?ID_LOGVIEW, text = "Toggle log view"}]},
case os:type() =:= {unix, darwin} of
false ->
FileMenu = {"File", [CDV, Quit]},
HelpMenu = {"Help", [About,Help]},
- [FileMenu, NodeMenu, HelpMenu];
+ [FileMenu, NodeMenu, LogMenu, HelpMenu];
true ->
            %% On Mac, quit and about will be moved to the "default" place
            %% automagically, so just add them to a menu that always exists.
%% But not to the help menu for some reason
- {Tag, Menus} = FileMenu,
- [{Tag, Menus ++ [About]}, NodeMenu, {"&Help", [Help]}]
+
+ {Tag, Menus} = NodeMenu,
+ [{Tag, Menus ++ [Quit,About]}, LogMenu, {"&Help", [Help]}]
end.
clean_menus(Menus, MenuBar) ->
@@ -658,3 +697,59 @@ update_node_list(State = #state{menubar=MenuBar}) ->
end,
observer_lib:create_menu_item(Dist, NodeMenu, Index),
State#state{nodes = Nodes}.
+
+ensure_sasl_started(Node) ->
+ %% is sasl started ?
+ Apps = rpc:block_call(Node, application, which_applications, []),
+ case lists:keyfind(sasl, 1, Apps) of
+ false -> throw("Error: sasl application not started."),
+ error;
+ {sasl, _, _} -> ok
+ end.
+
+ensure_mf_h_handler_used(Node) ->
+ %% is log_mf_h used ?
+ Handlers = rpc:block_call(Node, gen_event, which_handlers, [error_logger]),
+ case lists:any(fun(L)-> L == log_mf_h end, Handlers) of
+ false -> throw("Error: log_mf_h handler not used in sasl."),
+ error;
+ true -> ok
+ end.
+
+ensure_rb_mode(Node, PrevLog) ->
+ ok = ensure_rb_module_loaded(Node),
+ ok = is_rb_compatible(Node),
+ ok = is_rb_server_running(Node, PrevLog),
+ ok.
+
+
+ensure_rb_module_loaded(Node) ->
+    %% Need to ensure that the module is loaded in order to detect exported
+ %% functions on interactive nodes
+ case rpc:block_call(Node, code, ensure_loaded, [rb]) of
+ {badrpc, Reason} ->
+ throw("Error: badrpc - " ++ io_lib:format("~tp",[Reason]));
+ {error, Reason} ->
+ throw("Error: rb module load error - " ++ io_lib:format("~tp",[Reason]));
+ {module,rb} ->
+ ok
+ end.
+
+is_rb_compatible(Node) ->
+ %% Simply test that rb:log_list/0 is exported
+ case rpc:block_call(Node, erlang, function_exported, [rb, log_list, 0]) of
+ false -> throw("Error: Node's Erlang release must be at least R16B02.");
+ true -> ok
+ end.
+
+is_rb_server_running(Node, LogState) ->
+    %% If it is already started, somebody else may be using it.
+    %% We cannot use it as well, since the log file would be overridden. Not fair.
+ case rpc:block_call(Node, erlang, whereis, [rb_server]) of
+ Pid when is_pid(Pid), (LogState == false) ->
+	    throw("Error: rb_server is already started and may be used by someone else.");
+ Pid when is_pid(Pid) ->
+ ok;
+ undefined ->
+ ok
+ end.
diff --git a/lib/observer/test/observer_SUITE.erl b/lib/observer/test/observer_SUITE.erl
index 5cf719acb1..c69fdf4bdf 100644
--- a/lib/observer/test/observer_SUITE.erl
+++ b/lib/observer/test/observer_SUITE.erl
@@ -22,6 +22,8 @@
-include_lib("wx/include/wx.hrl").
-include_lib("observer/src/observer_tv.hrl").
+-define(ID_LOGVIEW, 5).
+
%% Test server specific exports
-export([all/0, suite/0,groups/0]).
-export([init_per_testcase/2, end_per_testcase/2,
@@ -44,8 +46,9 @@ all() ->
groups() ->
[{gui, [],
- [basic
- , process_win, table_win
+ [basic,
+ process_win,
+ table_win
]
}].
@@ -107,7 +110,7 @@ appup_file(Config) when is_list(Config) ->
basic(suite) -> [];
basic(doc) -> [""];
basic(Config) when is_list(Config) ->
- timer:send_after(100, "foobar"), %% Otherwise the timer sever gets added to procs
+ timer:send_after(100, "foobar"), %% Otherwise the timer server gets added to procs
ProcsBefore = processes(),
NumProcsBefore = length(ProcsBefore),
@@ -126,7 +129,7 @@ basic(Config) when is_list(Config) ->
timer:sleep(200),
ok = wxNotebook:advanceSelection(Notebook)
end,
- %% Just verify that we can toogle trough all pages
+ %% Just verify that we can toggle through all pages
[_|_] = [Check(N, false) || N <- lists:seq(1, Count)],
%% Cause it to resize
Frame = get_top_level_parent(Notebook),
@@ -214,10 +217,27 @@ test_page(Title, Window) ->
process_win(suite) -> [];
process_win(doc) -> [""];
process_win(Config) when is_list(Config) ->
+ % Stop SASL if already started
+ SaslStart = case whereis(sasl_sup) of
+ undefined -> false;
+ _ -> application:stop(sasl),
+ true
+ end,
+ % Define custom sasl and log_mf_h app vars
+ Privdir=?config(priv_dir,Config),
+ application:set_env(sasl, sasl_error_logger, tty),
+ application:set_env(sasl, error_logger_mf_dir, Privdir),
+ application:set_env(sasl, error_logger_mf_maxbytes, 1000),
+ application:set_env(sasl, error_logger_mf_maxfiles, 5),
+ application:start(sasl),
ok = observer:start(),
ObserverNB = setup_whitebox_testing(),
Parent = get_top_level_parent(ObserverNB),
- Frame = observer_procinfo:start(self(), Parent, self()),
+ % Activate log view
+ whereis(observer) ! #wx{id = ?ID_LOGVIEW, event = #wxCommand{type = command_menu_selected}},
+ timer:sleep(1000),
+    % Process window tests (use sasl_sup for a non-empty Log tab)
+ Frame = observer_procinfo:start(whereis(sasl_sup), Parent, self()),
PIPid = wx_object:get_pid(Frame),
PIPid ! {get_debug_info, self()},
Notebook = receive {procinfo_debug, NB} -> NB end,
@@ -229,6 +249,11 @@ process_win(Config) when is_list(Config) ->
[_|_] = [Check(N) || N <- lists:seq(1, Count)],
PIPid ! #wx{event=#wxClose{type=close_window}},
observer:stop(),
+ application:stop(sasl),
+ case SaslStart of
+ true -> application:start(sasl);
+ false -> ok
+ end,
ok.
table_win(suite) -> [];
diff --git a/lib/os_mon/c_src/memsup.c b/lib/os_mon/c_src/memsup.c
index 409db84aa7..5dcab07dd8 100644
--- a/lib/os_mon/c_src/memsup.c
+++ b/lib/os_mon/c_src/memsup.c
@@ -104,7 +104,7 @@
#if !defined (__OpenBSD__) && !defined (__NetBSD__)
#include <vm/vm_param.h>
#endif
-#if defined (__FreeBSD__) || defined(__DragonFly__) || defined (__NetBSD__)
+#if defined (__FreeBSD__) || defined(__DragonFly__) || defined (__NetBSD__) || defined(__OpenBSD__)
#include <sys/vmmeter.h>
#endif
#endif
diff --git a/lib/parsetools/include/leexinc.hrl b/lib/parsetools/include/leexinc.hrl
index 938aef58f9..2657fdcfaa 100644
--- a/lib/parsetools/include/leexinc.hrl
+++ b/lib/parsetools/include/leexinc.hrl
@@ -44,6 +44,8 @@ string(Ics0, L0, Tcs, Ts) ->
%% Test for and remove the end token wrapper. Push back characters
%% are prepended to RestChars.
+-dialyzer({nowarn_function, string_cont/4}).
+
string_cont(Rest, Line, {token,T}, Ts) ->
string(Rest, Line, Rest, [T|Ts]);
string_cont(Rest, Line, {token,T,Push}, Ts) ->
@@ -113,6 +115,8 @@ token(S0, Ics0, L0, Tcs, Tlen0, Tline, A0, Alen0) ->
%% If we have a token or error then return done, else if we have a
%% skip_token then continue.
+-dialyzer({nowarn_function, token_cont/3}).
+
token_cont(Rest, Line, {token,T}) ->
{done,{ok,T,Line},Rest};
token_cont(Rest, Line, {token,T,Push}) ->
@@ -187,6 +191,8 @@ tokens(S0, Ics0, L0, Tcs, Tlen0, Tline, Ts, A0, Alen0) ->
%% a token then save it and continue, else if we have a skip_token
%% just continue.
+-dialyzer({nowarn_function, tokens_cont/4}).
+
tokens_cont(Rest, Line, {token,T}, Ts) ->
tokens(yystate(), Rest, Line, Rest, 0, Line, [T|Ts], reject, 0);
tokens_cont(Rest, Line, {token,T,Push}, Ts) ->
@@ -238,6 +244,8 @@ skip_tokens(S0, Ics0, L0, Tcs, Tlen0, Tline, Error, A0, Alen0) ->
%% Skip tokens until we have an end_token or error then return done
 %% with the original error.
+-dialyzer({nowarn_function, skip_cont/4}).
+
skip_cont(Rest, Line, {token,_T}, Error) ->
skip_tokens(yystate(), Rest, Line, Rest, 0, Line, Error, reject, 0);
skip_cont(Rest, Line, {token,_T,Push}, Error) ->
diff --git a/lib/parsetools/src/yecc.erl b/lib/parsetools/src/yecc.erl
index f4657663e6..3fcec73ce2 100644
--- a/lib/parsetools/src/yecc.erl
+++ b/lib/parsetools/src/yecc.erl
@@ -2064,11 +2064,13 @@ output_actions(St0, StateJumps, StateInfo) ->
SelS = [{State,Called} ||
{{State,_JActions}, {State,Called}} <-
lists:zip(StateJumps, lists:keysort(1, Sel))],
+ St05 =
+ fwrite(St0, <<"-dialyzer({nowarn_function, yeccpars2/7}).\n">>, []),
St10 = foldl(fun({State, Called}, St_0) ->
{State, #state_info{state_repr = IState}} =
lookup_state(StateInfo, State),
output_state_selection(St_0, State, IState, Called)
- end, St0, SelS),
+ end, St05, SelS),
St20 = fwrite(St10, <<"yeccpars2(Other, _, _, _, _, _, _) ->\n">>, []),
St = fwrite(St20,
?YECC_BUG(<<"{missing_state_in_action_table, Other}">>, []),
@@ -2089,7 +2091,8 @@ output_state_selection(St0, State, IState, Called) ->
[Comment, IState]).
output_state_actions(St, State, State, {Actions,jump_none}, SI) ->
- output_state_actions1(St, State, Actions, true, normal, SI);
+ St1 = output_state_actions_begin(St, State, Actions),
+ output_state_actions1(St1, State, Actions, true, normal, SI);
output_state_actions(St0, State, State, {Actions, Jump}, SI) ->
{Tag, To, Common} = Jump,
CS = case Tag of
@@ -2099,13 +2102,22 @@ output_state_actions(St0, State, State, {Actions, Jump}, SI) ->
St = output_state_actions1(St0, State, Actions, true, {to, CS}, SI),
if
To =:= State ->
- output_state_actions1(St, CS, Common, true, normal, SI);
+ St1 = output_state_actions_begin(St, State, Actions),
+ output_state_actions1(St1, CS, Common, true, normal, SI);
true ->
St
end;
output_state_actions(St, State, JState, _XActions, _SI) ->
fwrite(St, <<"%% yeccpars2_~w: see yeccpars2_~w\n\n">>, [State, JState]).
+output_state_actions_begin(St, State, Actions) ->
+ case [yes || {_, #reduce{}} <- Actions] of
+ [] ->
+ fwrite(St, <<"-dialyzer({nowarn_function, yeccpars2_~w/7}).\n">>,
+ [State]); % Only when yeccerror(T) is output.
+ _ -> St
+ end.
+
output_state_actions1(St, State, [], IsFirst, normal, _SI) ->
output_state_actions_fini(State, IsFirst, St);
output_state_actions1(St0, State, [], IsFirst, {to, ToS}, _SI) ->
diff --git a/lib/parsetools/test/yecc_SUITE.erl b/lib/parsetools/test/yecc_SUITE.erl
index d308d21f82..c18dc15e37 100644
--- a/lib/parsetools/test/yecc_SUITE.erl
+++ b/lib/parsetools/test/yecc_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2005-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2005-2014. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -340,8 +340,8 @@ syntax(Config) when is_list(Config) ->
{_,[{L1,_,{undefined_function,{yeccpars2_2_,1}}},
{L2,_,{bad_inline,{yeccpars2_2_,1}}}]}],
[]} = compile:file(Parserfile1, [basic_validation,return]),
- ?line L1 = 28 + SzYeccPre,
- ?line L2 = 35 + SzYeccPre
+ ?line L1 = 31 + SzYeccPre,
+ ?line L2 = 38 + SzYeccPre
end(),
%% Bad macro in action. OTP-7224.
@@ -358,8 +358,8 @@ syntax(Config) when is_list(Config) ->
{_,[{L1,_,{undefined_function,{yeccpars2_2_,1}}},
{L2,_,{bad_inline,{yeccpars2_2_,1}}}]}],
[]} = compile:file(Parserfile1, [basic_validation,return]),
- ?line L1 = 28 + SzYeccPre,
- ?line L2 = 35 + SzYeccPre
+ ?line L1 = 31 + SzYeccPre,
+ ?line L2 = 38 + SzYeccPre
end(),
%% Check line numbers. OTP-7224.
@@ -1619,8 +1619,8 @@ otp_7292(Config) when is_list(Config) ->
{L2,_,{bad_inline,{yeccpars2_2_,1}}}]}],
[{_,[{16,_,{unused_function,{foo,0}}}]}]} =
compile:file(Parserfile1, [basic_validation, return]),
- ?line L1 = 38 + SzYeccPre,
- ?line L2 = 45 + SzYeccPre
+ L1 = 41 + SzYeccPre,
+ L2 = 48 + SzYeccPre
end(),
YeccPre = filename:join(Dir, "yeccpre.hrl"),
@@ -1637,8 +1637,8 @@ otp_7292(Config) when is_list(Config) ->
{L2,_,{bad_inline,{yeccpars2_2_,1}}}]}],
[{_,[{16,_,{unused_function,{foo,0}}}]}]} =
compile:file(Parserfile1, [basic_validation, return]),
- ?line L1 = 37 + SzYeccPre,
- ?line L2 = 44 + SzYeccPre
+ ?line L1 = 40 + SzYeccPre,
+ ?line L2 = 47 + SzYeccPre
end(),
file:delete(YeccPre),
diff --git a/lib/public_key/doc/src/public_key.xml b/lib/public_key/doc/src/public_key.xml
index e3473f80d7..b86d0fe0ab 100644
--- a/lib/public_key/doc/src/public_key.xml
+++ b/lib/public_key/doc/src/public_key.xml
@@ -5,7 +5,7 @@
<header>
<copyright>
<year>2008</year>
- <year>2014</year>
+ <year>2015</year>
<holder>Ericsson AB, All Rights Reserved</holder>
</copyright>
<legalnotice>
@@ -127,6 +127,8 @@
affiliationChanged | superseded | cessationOfOperation |
certificateHold | privilegeWithdrawn | aACompromise</code></p>
+ <p><code>issuer_name() = {rdnSequence,[#'AttributeTypeAndValue'{}]} </code> </p>
+
<p><code>ssh_file() = openssh_public_key | rfc4716_public_key | known_hosts |
auth_keys</code></p>
@@ -368,8 +370,8 @@
<name>pkix_is_issuer(Cert, IssuerCert) -> boolean()</name>
<fsummary> Checks if <c>IssuerCert</c> issued <c>Cert</c> </fsummary>
<type>
- <v>Cert = der_encode() | #'OTPCertificate'{}</v>
- <v>IssuerCert = der_encode() | #'OTPCertificate'{}</v>
+ <v>Cert = der_encoded() | #'OTPCertificate'{}</v>
+ <v>IssuerCert = der_encoded() | #'OTPCertificate'{}</v>
</type>
<desc>
<p> Checks if <c>IssuerCert</c> issued <c>Cert</c> </p>
@@ -380,7 +382,7 @@
<name>pkix_is_fixed_dh_cert(Cert) -> boolean()</name>
<fsummary> Checks if a Certificate is a fixed Diffie-Hellman Cert.</fsummary>
<type>
- <v>Cert = der_encode() | #'OTPCertificate'{}</v>
+ <v>Cert = der_encoded() | #'OTPCertificate'{}</v>
</type>
<desc>
<p> Checks if a Certificate is a fixed Diffie-Hellman Cert.</p>
@@ -391,7 +393,7 @@
<name>pkix_is_self_signed(Cert) -> boolean()</name>
<fsummary> Checks if a Certificate is self signed.</fsummary>
<type>
- <v>Cert = der_encode() | #'OTPCertificate'{}</v>
+ <v>Cert = der_encoded() | #'OTPCertificate'{}</v>
</type>
<desc>
<p> Checks if a Certificate is self signed.</p>
@@ -402,24 +404,25 @@
<name>pkix_issuer_id(Cert, IssuedBy) -> {ok, IssuerID} | {error, Reason}</name>
<fsummary> Returns the issuer id.</fsummary>
<type>
- <v>Cert = der_encode() | #'OTPCertificate'{}</v>
+ <v>Cert = der_encoded() | #'OTPCertificate'{}</v>
<v>IssuedBy = self | other</v>
- <v>IssuerID = {integer(), {rdnSequence, [#'AttributeTypeAndValue'{}]}}</v>
+ <v>IssuerID = {integer(), issuer_name()}</v>
      <d>The issuer id consists of the serial number and the issuer's name.</d>
<v>Reason = term()</v>
- </type>
- <desc>
- <p> Returns the issuer id.</p>
- </desc>
+ </type>
+ <desc>
+ <p> Returns the issuer id.</p>
+ </desc>
</func>
-
+
+
<func>
<name>pkix_normalize_name(Issuer) -> Normalized</name>
      <fsummary>Normalizes an issuer name so that it can be easily
compared to another issuer name. </fsummary>
<type>
- <v>Issuer = {rdnSequence,[#'AttributeTypeAndValue'{}]}</v>
- <v>Normalized = {rdnSequence, [#'AttributeTypeAndValue'{}]}</v>
+ <v>Issuer = issuer_name()</v>
+ <v>Normalized = issuer_name()</v>
</type>
<desc>
      <p>Normalizes an issuer name so that it can be easily
@@ -431,13 +434,13 @@
<name>pkix_path_validation(TrustedCert, CertChain, Options) -> {ok, {PublicKeyInfo, PolicyTree}} | {error, {bad_cert, Reason}} </name>
<fsummary> Performs a basic path validation according to RFC 5280.</fsummary>
<type>
- <v> TrustedCert = #'OTPCertificate'{} | der_encode() | atom() </v>
+ <v> TrustedCert = #'OTPCertificate'{} | der_encoded() | atom() </v>
<d>Normally a trusted certificate but it can also be a path validation
error that can be discovered while
constructing the input to this function and that should be run through the <c>verify_fun</c>.
For example <c>unknown_ca </c> or <c>selfsigned_peer </c>
</d>
- <v> CertChain = [der_encode()]</v>
+ <v> CertChain = [der_encoded()]</v>
<d>A list of DER encoded certificates in trust order ending with the peer certificate.</d>
<v> Options = proplists:proplist()</v>
<v>PublicKeyInfo = {?'rsaEncryption' | ?'id-dsa',
@@ -527,6 +530,17 @@ fun(OtpCert :: #'OTPCertificate'{},
</desc>
</func>
+ <func>
+ <name>pkix_crl_issuer(CRL) -> issuer_name()</name>
+ <fsummary>Returns the issuer of the <c>CRL</c>.</fsummary>
+ <type>
+ <v>CRL = der_encoded() | #'CertificateList'{} </v>
+ </type>
+ <desc>
+ <p>Returns the issuer of the <c>CRL</c>.</p>
+ </desc>
+ </func>
+
<func>
<name>pkix_crls_validate(OTPCertificate, DPAndCRLs, Options) -> CRLStatus()</name>
<fsummary> Performs CRL validation.</fsummary>
@@ -574,9 +588,48 @@ fun(#'DistributionPoint'{}, #'CertificateList'{},
</taglist>
</desc>
</func>
+
+ <func>
+ <name>pkix_crl_verify(CRL, Cert) -> boolean()</name>
+ <fsummary> Verify that <c>Cert</c> is the <c> CRL</c> signer. </fsummary>
+ <type>
+ <v>CRL = der_encoded() | #'CertificateList'{} </v>
+ <v>Cert = der_encoded() | #'OTPCertificate'{} </v>
+ </type>
+ <desc>
+ <p>Verify that <c>Cert</c> is the <c>CRL</c> signer.</p>
+ </desc>
+ </func>
+ <func>
+ <name>pkix_dist_point(Cert) -> DistPoint</name>
+ <fsummary>Creates a distribution point for CRLs issued by the same issuer as <c>Cert</c>.</fsummary>
+ <type>
+ <v> Cert = der_encoded() | #'OTPCertificate'{} </v>
+ <v> DistPoint = #'DistributionPoint'{}</v>
+ </type>
+ <desc>
+ <p>Creates a distribution point for CRLs issued by the same issuer as <c>Cert</c>.
+ Can be used as input to <seealso
+ marker="#pkix_crls_validate-3">pkix_crls_validate/3 </seealso>
+ </p>
+ </desc>
+ </func>
+
+ <func>
+ <name>pkix_dist_points(Cert) -> DistPoints</name>
+      <fsummary>Extracts distribution points from the certificate's extensions.</fsummary>
+ <type>
+ <v> Cert = der_encoded() | #'OTPCertificate'{} </v>
+ <v> DistPoints = [#'DistributionPoint'{}]</v>
+ </type>
+ <desc>
+      <p>Extracts distribution points from the certificate's extensions.</p>
+ </desc>
+ </func>
+
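
A hedged usage sketch combining the new functions documented above (the file names are placeholders; the calls mirror the pkix_crl test case added later in this diff):

    {ok, PemCRL} = file:read_file("crl.pem"),            %% placeholder
    [{_, DerCRL, _}] = public_key:pem_decode(PemCRL),
    {ok, PemCert} = file:read_file("crl_signer.pem"),    %% placeholder
    [{_, DerCert, _}] = public_key:pem_decode(PemCert),
    {rdnSequence, _} = public_key:pkix_crl_issuer(DerCRL),
    true = public_key:pkix_crl_verify(DerCRL, DerCert),
    %% Returns [] if the certificate has no cRLDistributionPoints extension.
    DPs = public_key:pkix_dist_points(DerCert).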
<func>
- <name>pkix_sign(#'OTPTBSCertificate'{}, Key) -> der_encode()</name>
+ <name>pkix_sign(#'OTPTBSCertificate'{}, Key) -> der_encoded()</name>
<fsummary>Signs certificate.</fsummary>
<type>
<v>Key = rsa_public_key() | dsa_public_key()</v>
@@ -606,7 +659,7 @@ fun(#'DistributionPoint'{}, #'CertificateList'{},
<name>pkix_verify(Cert, Key) -> boolean()</name>
<fsummary> Verify pkix x.509 certificate signature.</fsummary>
<type>
- <v>Cert = der_encode()</v>
+ <v>Cert = der_encoded()</v>
<v>Key = rsa_public_key() | dsa_public_key()</v>
</type>
<desc>
diff --git a/lib/public_key/src/pubkey_cert.erl b/lib/public_key/src/pubkey_cert.erl
index ae517ca642..8b11538499 100644
--- a/lib/public_key/src/pubkey_cert.erl
+++ b/lib/public_key/src/pubkey_cert.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2014. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -28,8 +28,9 @@
validate_issuer/4, validate_names/6,
validate_extensions/4,
normalize_general_name/1, is_self_signed/1,
- is_issuer/2, issuer_id/2, is_fixed_dh_cert/1,
- verify_data/1, verify_fun/4, select_extension/2, match_name/3,
+ is_issuer/2, issuer_id/2, distribution_points/1,
+ is_fixed_dh_cert/1, verify_data/1, verify_fun/4,
+ select_extension/2, match_name/3,
extensions_list/1, cert_auth_key_id/1, time_str_2_gregorian_sec/1]).
-define(NULL, 0).
@@ -272,6 +273,16 @@ issuer_id(Otpcert, self) ->
SerialNr = TBSCert#'OTPTBSCertificate'.serialNumber,
{ok, {SerialNr, normalize_general_name(Issuer)}}.
+distribution_points(Otpcert) ->
+ TBSCert = Otpcert#'OTPCertificate'.tbsCertificate,
+ Extensions = extensions_list(TBSCert#'OTPTBSCertificate'.extensions),
+ case select_extension(?'id-ce-cRLDistributionPoints', Extensions) of
+ undefined ->
+ [];
+ #'Extension'{extnValue = Value} ->
+ Value
+ end.
+
%%--------------------------------------------------------------------
-spec is_fixed_dh_cert(#'OTPCertificate'{}) -> boolean().
%%
@@ -296,7 +307,9 @@ is_fixed_dh_cert(#'OTPCertificate'{tbsCertificate =
%% --------------------------------------------------------------------
verify_fun(Otpcert, Result, UserState0, VerifyFun) ->
case VerifyFun(Otpcert, Result, UserState0) of
- {valid,UserState} ->
+ {valid, UserState} ->
+ UserState;
+ {valid_peer, UserState} ->
UserState;
{fail, Reason} ->
case Reason of
diff --git a/lib/public_key/src/pubkey_crl.erl b/lib/public_key/src/pubkey_crl.erl
index f0df4bc3f2..488cc97c70 100644
--- a/lib/public_key/src/pubkey_crl.erl
+++ b/lib/public_key/src/pubkey_crl.erl
@@ -41,10 +41,10 @@ validate(OtpCert, OtherDPCRLs, DP, {DerCRL, CRL}, {DerDeltaCRL, DeltaCRL},
CRLIssuer = TBSCRL#'TBSCertList'.issuer,
AltNames = case pubkey_cert:select_extension(?'id-ce-subjectAltName',
TBSCert#'OTPTBSCertificate'.extensions) of
- undefined ->
- [];
- Ext ->
- Ext#'Extension'.extnValue
+ #'Extension'{extnValue = Value} ->
+ Value;
+ _ ->
+ []
end,
revoked_status(DP, IDP, {directoryName, CRLIssuer},
[ {directoryName, CertIssuer} | AltNames], SerialNumber, Revoked,
diff --git a/lib/public_key/src/public_key.erl b/lib/public_key/src/public_key.erl
index 1bbf4ef416..a0a87e5351 100644
--- a/lib/public_key/src/public_key.erl
+++ b/lib/public_key/src/public_key.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -46,7 +46,11 @@
pkix_normalize_name/1,
pkix_path_validation/3,
ssh_decode/2, ssh_encode/2,
- pkix_crls_validate/3
+ pkix_crls_validate/3,
+ pkix_dist_point/1,
+ pkix_dist_points/1,
+ pkix_crl_verify/2,
+ pkix_crl_issuer/1
]).
-export_type([public_key/0, private_key/0, pem_entry/0,
@@ -470,6 +474,45 @@ verify(DigestOrPlainText, sha = DigestType, Signature, {Key, #'Dss-Parms'{p = P
crypto:verify(dss, DigestType, DigestOrPlainText, Signature, [P, Q, G, Key]).
%%--------------------------------------------------------------------
+-spec pkix_dist_point(der_encoded() | #'OTPCertificate'{}) ->
+ #'DistributionPoint'{}.
+%% Description: Creates a distribution point for CRLs issued by the same issuer as Cert.
+%%--------------------------------------------------------------------
+pkix_dist_point(OtpCert) when is_binary(OtpCert) ->
+ pkix_dist_point(pkix_decode_cert(OtpCert, otp));
+pkix_dist_point(OtpCert) ->
+ Issuer = public_key:pkix_normalize_name(
+ pubkey_cert_records:transform(
+ OtpCert#'OTPCertificate'.tbsCertificate#'OTPTBSCertificate'.issuer, encode)),
+
+ TBSCert = OtpCert#'OTPCertificate'.tbsCertificate,
+ Extensions = pubkey_cert:extensions_list(TBSCert#'OTPTBSCertificate'.extensions),
+ AltNames = case pubkey_cert:select_extension(?'id-ce-issuerAltName', Extensions) of
+ undefined ->
+ [];
+ #'Extension'{extnValue = Value} ->
+ Value
+ end,
+ Point = {fullName, [{directoryName, Issuer} | AltNames]},
+ #'DistributionPoint'{cRLIssuer = asn1_NOVALUE,
+ reasons = asn1_NOVALUE,
+ distributionPoint = Point}.
+%%--------------------------------------------------------------------
+-spec pkix_dist_points(der_encoded() | #'OTPCertificate'{}) ->
+ [#'DistributionPoint'{}].
+%% Description: Extracts distribution points specified in the certificate's extensions.
+%%--------------------------------------------------------------------
+pkix_dist_points(OtpCert) when is_binary(OtpCert) ->
+ pkix_dist_points(pkix_decode_cert(OtpCert, otp));
+pkix_dist_points(OtpCert) ->
+ Value = pubkey_cert:distribution_points(OtpCert),
+ lists:foldl(fun(Point, Acc0) ->
+ DistPoint = pubkey_cert_records:transform(Point, decode),
+ [DistPoint | Acc0]
+ end,
+ [], Value).
+
+%%--------------------------------------------------------------------
-spec pkix_sign(#'OTPTBSCertificate'{},
rsa_private_key() | dsa_private_key()) -> Der::binary().
%%
@@ -511,6 +554,25 @@ pkix_verify(DerCert, Key = {#'ECPoint'{}, _})
verify(PlainText, DigestType, Signature, Key).
%%--------------------------------------------------------------------
+-spec pkix_crl_verify(CRL::binary() | #'CertificateList'{}, Cert::binary() | #'OTPCertificate'{}) -> boolean().
+%%
+%% Description: Verify that Cert is the CRL signer.
+%%--------------------------------------------------------------------
+pkix_crl_verify(CRL, Cert) when is_binary(CRL) ->
+ pkix_crl_verify(der_decode('CertificateList', CRL), Cert);
+pkix_crl_verify(CRL, Cert) when is_binary(Cert) ->
+ pkix_crl_verify(CRL, pkix_decode_cert(Cert, otp));
+pkix_crl_verify(#'CertificateList'{} = CRL, #'OTPCertificate'{} = Cert) ->
+ TBSCert = Cert#'OTPCertificate'.tbsCertificate,
+ PublicKeyInfo = TBSCert#'OTPTBSCertificate'.subjectPublicKeyInfo,
+ PublicKey = PublicKeyInfo#'OTPSubjectPublicKeyInfo'.subjectPublicKey,
+ AlgInfo = PublicKeyInfo#'OTPSubjectPublicKeyInfo'.algorithm,
+ PublicKeyParams = AlgInfo#'PublicKeyAlgorithm'.parameters,
+ pubkey_crl:verify_crl_signature(CRL,
+ der_encode('CertificateList', CRL),
+ PublicKey, PublicKeyParams).
+
+%%--------------------------------------------------------------------
-spec pkix_is_issuer(Cert :: der_encoded()| #'OTPCertificate'{} | #'CertificateList'{},
IssuerCert :: der_encoded()|
#'OTPCertificate'{}) -> boolean().
@@ -564,15 +626,21 @@ pkix_is_fixed_dh_cert(Cert) when is_binary(Cert) ->
%
%% Description: Returns the issuer id.
%%--------------------------------------------------------------------
-pkix_issuer_id(#'OTPCertificate'{} = OtpCert, self) ->
- pubkey_cert:issuer_id(OtpCert, self);
-
-pkix_issuer_id(#'OTPCertificate'{} = OtpCert, other) ->
- pubkey_cert:issuer_id(OtpCert, other);
+pkix_issuer_id(Cert, Signed)->
+ pkix_issuer_id(Cert, Signed, decode).
-pkix_issuer_id(Cert, Signed) when is_binary(Cert) ->
- OtpCert = pkix_decode_cert(Cert, otp),
- pkix_issuer_id(OtpCert, Signed).
+%%--------------------------------------------------------------------
+-spec pkix_crl_issuer(CRL::binary()| #'CertificateList'{}) ->
+ {rdnSequence,
+ [#'AttributeTypeAndValue'{}]}.
+%
+%% Description: Returns the issuer.
+%%--------------------------------------------------------------------
+pkix_crl_issuer(CRL) when is_binary(CRL) ->
+ pkix_crl_issuer(der_decode('CertificateList', CRL));
+pkix_crl_issuer(#'CertificateList'{} = CRL) ->
+ pubkey_cert_records:transform(
+ CRL#'CertificateList'.tbsCertList#'TBSCertList'.issuer, decode).
%%--------------------------------------------------------------------
-spec pkix_normalize_name({rdnSequence,
@@ -921,3 +989,18 @@ ec_key({PubKey, PrivateKey}, Params) ->
privateKey = binary_to_list(PrivateKey),
parameters = Params,
publicKey = {0, PubKey}}.
+
+pkix_issuer_id(#'OTPCertificate'{} = OtpCert, Signed, decode) when (Signed == self) or
+ (Signed == other) ->
+ pubkey_cert:issuer_id(OtpCert, Signed);
+pkix_issuer_id(#'OTPCertificate'{} = OtpCert, Signed, encode) when (Signed == self) or
+ (Signed == other) ->
+ case pubkey_cert:issuer_id(OtpCert, Signed) of
+ {ok, {Serial, Issuer}} ->
+ {ok, {Serial, pubkey_cert_records:transform(Issuer, encode)}};
+ Error ->
+ Error
+ end;
+pkix_issuer_id(Cert, Signed, Decode) when is_binary(Cert) ->
+ OtpCert = pkix_decode_cert(Cert, otp),
+ pkix_issuer_id(OtpCert, Signed, Decode).
diff --git a/lib/public_key/test/public_key_SUITE.erl b/lib/public_key/test/public_key_SUITE.erl
index 163f5f4413..40c28e86b3 100644
--- a/lib/public_key/test/public_key_SUITE.erl
+++ b/lib/public_key/test/public_key_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -42,7 +42,7 @@ all() ->
encrypt_decrypt,
{group, sign_verify},
pkix, pkix_countryname, pkix_emailaddress, pkix_path_validation,
- pkix_iso_rsa_oid, pkix_iso_dsa_oid].
+ pkix_iso_rsa_oid, pkix_iso_dsa_oid, pkix_crl].
groups() ->
[{pem_decode_encode, [], [dsa_pem, rsa_pem, encrypted_pem,
@@ -712,6 +712,42 @@ pkix_iso_dsa_oid(Config) when is_list(Config) ->
{_, dsa} = public_key:pkix_sign_types(SigAlg#'SignatureAlgorithm'.algorithm).
%%--------------------------------------------------------------------
+
+pkix_crl() ->
+ [{doc, "test pkix_crl_* functions"}].
+
+pkix_crl(Config) when is_list(Config) ->
+ Datadir = ?config(data_dir, Config),
+ {ok, PemCRL} = file:read_file(filename:join(Datadir, "idp_crl.pem")),
+ [{_, CRL, _}] = public_key:pem_decode(PemCRL),
+
+ {ok, IDPPemCert} = file:read_file(filename:join(Datadir, "idp_cert.pem")),
+ [{_, IDPCert, _}] = public_key:pem_decode(IDPPemCert),
+
+ {ok, SignPemCert} = file:read_file(filename:join(Datadir, "crl_signer.pem")),
+ [{_, SignCert, _}] = public_key:pem_decode(SignPemCert),
+
+ OTPIDPCert = public_key:pkix_decode_cert(IDPCert, otp),
+ OTPSignCert = public_key:pkix_decode_cert(SignCert, otp),
+ ERLCRL = public_key:der_decode('CertificateList',CRL),
+
+ {rdnSequence,_} = public_key:pkix_crl_issuer(CRL),
+ {rdnSequence,_} = public_key:pkix_crl_issuer(ERLCRL),
+
+ true = public_key:pkix_crl_verify(CRL, SignCert),
+ true = public_key:pkix_crl_verify(ERLCRL, OTPSignCert),
+
+ [#'DistributionPoint'{}|_] = public_key:pkix_dist_points(IDPCert),
+ [#'DistributionPoint'{}|_] = public_key:pkix_dist_points(OTPIDPCert),
+
+ #'DistributionPoint'{cRLIssuer = asn1_NOVALUE,
+ reasons = asn1_NOVALUE,
+ distributionPoint = Point} = public_key:pkix_dist_point(IDPCert),
+ #'DistributionPoint'{cRLIssuer = asn1_NOVALUE,
+ reasons = asn1_NOVALUE,
+ distributionPoint = Point} = public_key:pkix_dist_point(OTPIDPCert).
+
+%%--------------------------------------------------------------------
%% Internal functions ------------------------------------------------
%%--------------------------------------------------------------------
asn1_encode_decode({Asn1Type, Der, not_encrypted} = Entry) ->
diff --git a/lib/public_key/test/public_key_SUITE_data/crl_signer.pem b/lib/public_key/test/public_key_SUITE_data/crl_signer.pem
new file mode 100644
index 0000000000..d77f86b45d
--- /dev/null
+++ b/lib/public_key/test/public_key_SUITE_data/crl_signer.pem
@@ -0,0 +1,25 @@
+-----BEGIN CERTIFICATE-----
+MIID8zCCAtugAwIBAgIJAKU8w89SmyPyMA0GCSqGSIb3DQEBBAUAMIGGMREwDwYD
+VQQDEwhlcmxhbmdDQTETMBEGA1UECxMKRXJsYW5nIE9UUDEUMBIGA1UEChMLRXJp
+Y3Nzb24gQUIxEjAQBgNVBAcTCVN0b2NraG9sbTELMAkGA1UEBhMCU0UxJTAjBgkq
+hkiG9w0BCQEWFnBldGVyQGVyaXguZXJpY3Nzb24uc2UwHhcNMTUwMjIzMTMyNTMx
+WhcNMTUwMzI1MTMyNTMxWjCBhjERMA8GA1UEAxMIZXJsYW5nQ0ExEzARBgNVBAsT
+CkVybGFuZyBPVFAxFDASBgNVBAoTC0VyaWNzc29uIEFCMRIwEAYDVQQHEwlTdG9j
+a2hvbG0xCzAJBgNVBAYTAlNFMSUwIwYJKoZIhvcNAQkBFhZwZXRlckBlcml4LmVy
+aWNzc29uLnNlMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAyzwkmKzy
+WTLOafHmgqZVENdt3OYECPA4BamVKyEdi8zgXI0S71wzPZ+XvuGbHDTBzsTHf71L
+xRQgoG30tv5jqWSlfh8iyS6fO+FHxBKd+xg6hLJXk5PCUa5X1D4BO8B4aapEzev+
+T8+pTaOLeVPdfGfKp0yWF50eCpdSF/kMCCIIA8QNSahfcwuLbEEzUNZof6YPZBNm
+e+XUMXCjpb/mU7krfu8nLaspG1HgxQqErEEBzGJE7mguqSVETK/xpGXEMTNIuj8N
+ziFrfqAezDob3z48xHUaHKZRBb9NIxWIjVxkTYaqOtf9UNCT96CHeZ7rk9iNscQu
+USabMIamFY8cNQIDAQABo2IwYDAPBgNVHRMBAf8EBTADAQH/MAsGA1UdDwQEAwIB
+BjAdBgNVHQ4EFgQUm2M3f6UBEIsHI1HIvphbBz60RsAwIQYDVR0RBBowGIEWcGV0
+ZXJAZXJpeC5lcmljc3Nvbi5zZTANBgkqhkiG9w0BAQQFAAOCAQEAPmm0V36HZySF
+BoV03DGyeFUSeMtO0DO058NaXXv2VNPpUXT72Mt1ovXNvVFcReggb01polF7TFFI
+4NRb6qbsLPxny29Clf/9WKY4zDhbb2MIy8yueoOyyeNQtrzY+iQjo4q9U+Aa6xj1
+pxmG1URDfOmCgX33ItCrZXFGa4ic0HrbWgJMDNo4lSOiio8bl3IYN4vBcobRfhDs
+pw5jochE5ZpPh4i76Pg6D99EFkNaLyQioWEu4n2OxR0EBSFLJkVJQ0alUx18AKio
+bje+h5nzRgTm5HApYzcorF57KfUKPDaW1Q6tRckRyHApueDuK8p49ITQE71lmkLc
+ywxoJMrNnA==
+-----END CERTIFICATE-----
+
diff --git a/lib/public_key/test/public_key_SUITE_data/idp_cert.pem b/lib/public_key/test/public_key_SUITE_data/idp_cert.pem
new file mode 100644
index 0000000000..c2afc56a3a
--- /dev/null
+++ b/lib/public_key/test/public_key_SUITE_data/idp_cert.pem
@@ -0,0 +1,30 @@
+-----BEGIN CERTIFICATE-----
+MIIFGjCCBAKgAwIBAgIBAzANBgkqhkiG9w0BAQQFADCBgzEOMAwGA1UEAxMFb3Rw
+Q0ExEzARBgNVBAsTCkVybGFuZyBPVFAxFDASBgNVBAoTC0VyaWNzc29uIEFCMQsw
+CQYDVQQGEwJTRTESMBAGA1UEBxMJU3RvY2tob2xtMSUwIwYJKoZIhvcNAQkBFhZw
+ZXRlckBlcml4LmVyaWNzc29uLnNlMB4XDTE1MDIyMzEzMjUzMVoXDTI1MDEwMTEz
+MjUzMVowgYQxDzANBgNVBAMTBnNlcnZlcjETMBEGA1UECxMKRXJsYW5nIE9UUDEU
+MBIGA1UEChMLRXJpY3Nzb24gQUIxCzAJBgNVBAYTAlNFMRIwEAYDVQQHEwlTdG9j
+a2hvbG0xJTAjBgkqhkiG9w0BCQEWFnBldGVyQGVyaXguZXJpY3Nzb24uc2UwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDK8EDdNZEebdfxb57e3UA8uTCq
+TsFtJv5tyjnZtSFsGDrwrZYjRMOCJFh8Yv6Ddq4mZiAvUCJxMzW4zVzraMmmQC8z
+Hi3xQyuIq2UCW3ESxLvchCcuSjNOWke0z+rXHzA8Yz9y1fqhhO6AF8q5lLwGo+VQ
+sJkVV8QwB9UXZN4pAc3zTeqZkGCrNY/ZIgtCrk4jw7sY/gumS8BjhXCYGyFZRDvX
+jzIXQx6jn7/2huNbEAiBXbYYAMd7OEwhpHHAWOVA6g+/TNydgRO3W4xVmlEhDpYs
+bnMV/Tq570E1bhz1XWb642K2MnxI74g8FXmhN6x6P8d4zU/eFcs+gxO0X6KzAgMB
+AAGjggGUMIIBkDAJBgNVHRMEAjAAMAsGA1UdDwQEAwIF4DAdBgNVHQ4EFgQUo8dr
+DDQXK25dB6qMY8dNIjAKIPEwgbMGA1UdIwSBqzCBqIAU5YMIq7A5eYQhQsHsc/XC
+7GeZ+kuhgYykgYkwgYYxETAPBgNVBAMTCGVybGFuZ0NBMRMwEQYDVQQLEwpFcmxh
+bmcgT1RQMRQwEgYDVQQKEwtFcmljc3NvbiBBQjESMBAGA1UEBxMJU3RvY2tob2xt
+MQswCQYDVQQGEwJTRTElMCMGCSqGSIb3DQEJARYWcGV0ZXJAZXJpeC5lcmljc3Nv
+bi5zZYIBATAhBgNVHREEGjAYgRZwZXRlckBlcml4LmVyaWNzc29uLnNlMCEGA1Ud
+EgQaMBiBFnBldGVyQGVyaXguZXJpY3Nzb24uc2UwWwYDVR0fBFQwUjAkoCKgIIYe
+aHR0cDovL2xvY2FsaG9zdC9vdHBDQS9jcmwucGVtMCqgKKAmhiRodHRwOi8vbG9j
+YWxob3N0OjM3ODEzL290cENBL2NybC5wZW0wDQYJKoZIhvcNAQEEBQADggEBACwq
+o4nQTTereSIL8ZLQHweJKXYstTaZrRrAaoRUe9oClY7H++zXmMa8iZvUqqdT3fXW
+4KMXXyoB1o+cLxLnAPKOiFFL9rcbaeAMxZMIrTaFDQsOXAPVqJLSWWS5I5LsNvS6
+MlB6O6+0binTyilDKg683VV9nKNiNdL8WzGa5ig+HvK6xUpJwpOTmDmfdg09zQ+8
+aCbJrthXg0tNnGIorttAd2wFvmLUezoJrlfwLChB0M/qa+RVRCFMiPvkWupo5eVK
+Malwpz2xp2rAUlb6qQY7eI6lV8JsVK06QxBmUHP68Y9kYT5/gy5ketjOB0Ypin05
+6+3VrZKFxrkqKaEoL50=
+-----END CERTIFICATE-----
diff --git a/lib/public_key/test/public_key_SUITE_data/idp_crl.pem b/lib/public_key/test/public_key_SUITE_data/idp_crl.pem
new file mode 100644
index 0000000000..0872279501
--- /dev/null
+++ b/lib/public_key/test/public_key_SUITE_data/idp_crl.pem
@@ -0,0 +1,18 @@
+-----BEGIN X509 CRL-----
+MIIC3TCCAcUCAQEwDQYJKoZIhvcNAQEEBQAwgYYxETAPBgNVBAMTCGVybGFuZ0NB
+MRMwEQYDVQQLEwpFcmxhbmcgT1RQMRQwEgYDVQQKEwtFcmljc3NvbiBBQjESMBAG
+A1UEBxMJU3RvY2tob2xtMQswCQYDVQQGEwJTRTElMCMGCSqGSIb3DQEJARYWcGV0
+ZXJAZXJpeC5lcmljc3Nvbi5zZRcNMTUwMjIzMTMyNTMxWhcNMTUwMjI0MTMyNTMx
+WqCCAQgwggEEMIG7BgNVHSMEgbMwgbCAFJtjN3+lARCLByNRyL6YWwc+tEbAoYGM
+pIGJMIGGMREwDwYDVQQDEwhlcmxhbmdDQTETMBEGA1UECxMKRXJsYW5nIE9UUDEU
+MBIGA1UEChMLRXJpY3Nzb24gQUIxEjAQBgNVBAcTCVN0b2NraG9sbTELMAkGA1UE
+BhMCU0UxJTAjBgkqhkiG9w0BCQEWFnBldGVyQGVyaXguZXJpY3Nzb24uc2WCCQCl
+PMPPUpsj8jA4BgNVHRwBAf8ELjAsoCqgKIYmaHR0cDovL2xvY2FsaG9zdDo4MDAw
+L2VybGFuZ0NBL2NybC5wZW0wCgYDVR0UBAMCAQEwDQYJKoZIhvcNAQEEBQADggEB
+AE9WKJhW1oivBEE91akeDcYCtSVp98F7DxzQyJTBLQJGMEXSg8G/oAp64F4qs3oV
+LXS5YFYwxjD9tXByGVEJoIUUMtfMeCvZMgd2V8mBlAJiyHkTrFFA8PgBv+htrJji
+nrheAhrEedqZbqwmrcU34h9fWHp0Zl6UDYyF3I/S0/5ilIz3DvNZ9SBfKKt3DYeW
+hon7qpNo6YrtEzbXyOaa2mFX9c1w39LBZ1FdY0jEzUfh2eImBLxnBjZArNxzYuU8
+a+lNMjc6JUAJwITS6C1YfI4ECsqXe0K/n90pMcm/jgiGFCZhVbXq+Nrm/24qPKBA
+zqoNos7aV7LEYLYOjknaIhY=
+-----END X509 CRL-----
diff --git a/lib/runtime_tools/src/dbg.erl b/lib/runtime_tools/src/dbg.erl
index 186563ab74..c2de57d40b 100644
--- a/lib/runtime_tools/src/dbg.erl
+++ b/lib/runtime_tools/src/dbg.erl
@@ -778,50 +778,50 @@ tracer_init(Handler, HandlerData) ->
tracer_loop(Handler, HandlerData).
tracer_loop(Handler, Hdata) ->
- receive
- Msg ->
- %% Don't match in receive to avoid giving EXIT message higher
- %% priority than the trace messages.
- case Msg of
- {'EXIT',_Pid,_Reason} ->
- ok;
- Trace ->
- NewData = recv_all_traces(Trace, Handler, Hdata),
- tracer_loop(Handler, NewData)
- end
+ {State, Suspended, Traces} = recv_all_traces(),
+ NewHdata = handle_traces(Suspended, Traces, Handler, Hdata),
+ case State of
+ done ->
+ exit(normal);
+ loop ->
+ tracer_loop(Handler, NewHdata)
end.
-
-recv_all_traces(Trace, Handler, Hdata) ->
- Suspended = suspend(Trace, []),
- recv_all_traces(Suspended, Handler, Hdata, [Trace]).
-recv_all_traces(Suspended0, Handler, Hdata, Traces) ->
+recv_all_traces() ->
+ recv_all_traces([], [], infinity).
+
+recv_all_traces(Suspended0, Traces, Timeout) ->
receive
Trace when is_tuple(Trace), element(1, Trace) == trace ->
Suspended = suspend(Trace, Suspended0),
- recv_all_traces(Suspended, Handler, Hdata, [Trace|Traces]);
+ recv_all_traces(Suspended, [Trace|Traces], 0);
Trace when is_tuple(Trace), element(1, Trace) == trace_ts ->
Suspended = suspend(Trace, Suspended0),
- recv_all_traces(Suspended, Handler, Hdata, [Trace|Traces]);
+ recv_all_traces(Suspended, [Trace|Traces], 0);
Trace when is_tuple(Trace), element(1, Trace) == seq_trace ->
Suspended = suspend(Trace, Suspended0),
- recv_all_traces(Suspended, Handler, Hdata, [Trace|Traces]);
+ recv_all_traces(Suspended, [Trace|Traces], 0);
Trace when is_tuple(Trace), element(1, Trace) == drop ->
Suspended = suspend(Trace, Suspended0),
- recv_all_traces(Suspended, Handler, Hdata, [Trace|Traces]);
+ recv_all_traces(Suspended, [Trace|Traces], 0);
+ {'EXIT', _Pid, _Reason} ->
+ {done, Suspended0, Traces};
Other ->
%%% Is this really a good idea?
io:format(user,"** tracer received garbage: ~p~n", [Other]),
- recv_all_traces(Suspended0, Handler, Hdata, Traces)
- after 0 ->
- case catch invoke_handler(Traces, Handler, Hdata) of
- {'EXIT',Reason} ->
- resume(Suspended0),
- exit({trace_handler_crashed,Reason});
- NewHdata ->
- resume(Suspended0),
- NewHdata
- end
+ recv_all_traces(Suspended0, Traces, Timeout)
+ after Timeout ->
+ {loop, Suspended0, Traces}
+ end.
+
+handle_traces(Suspended, Traces, Handler, Hdata) ->
+ case catch invoke_handler(Traces, Handler, Hdata) of
+ {'EXIT',Reason} ->
+ resume(Suspended),
+ exit({trace_handler_crashed,Reason});
+ NewHdata ->
+ resume(Suspended),
+ NewHdata
end.
invoke_handler([Tr|Traces], Handler, Hdata0) ->
diff --git a/lib/runtime_tools/test/dbg_SUITE.erl b/lib/runtime_tools/test/dbg_SUITE.erl
index dfae52ed1d..0bcbd67d05 100644
--- a/lib/runtime_tools/test/dbg_SUITE.erl
+++ b/lib/runtime_tools/test/dbg_SUITE.erl
@@ -25,7 +25,7 @@
ip_port/1, file_port/1, file_port2/1, file_port_schedfix/1,
ip_port_busy/1, wrap_port/1, wrap_port_time/1,
with_seq_trace/1, dead_suspend/1, local_trace/1,
- saved_patterns/1]).
+ saved_patterns/1, tracer_exit_on_stop/1]).
-export([init_per_testcase/2, end_per_testcase/2]).
-export([tracee1/1, tracee2/1]).
-export([dummy/0, exported/1]).
@@ -47,7 +47,7 @@ all() ->
[big, tiny, simple, message, distributed, ip_port,
file_port, file_port2, file_port_schedfix, ip_port_busy,
wrap_port, wrap_port_time, with_seq_trace, dead_suspend,
- local_trace, saved_patterns].
+ local_trace, saved_patterns, tracer_exit_on_stop].
groups() ->
[].
@@ -742,6 +742,38 @@ run_dead_suspend() ->
dummy() ->
ok.
+%% Test that a tracer process does not ignore an exit signal message when it has
+%% received (but not handled) trace messages
+tracer_exit_on_stop(_) ->
+ %% Tracer blocks waiting for fun to complete so that the trace message and
+ %% the exit signal message from the dbg process are in its message queue.
+ Fun = fun() ->
+ ?MODULE:dummy(),
+ Ref = erlang:trace_delivered(self()),
+ receive {trace_delivered, _, Ref} -> stop() end
+ end,
+ {ok, _} = dbg:tracer(process, {fun spawn_once_handler/2, {self(), Fun}}),
+ {ok, Tracer} = dbg:get_tracer(),
+ MRef = monitor(process, Tracer),
+ {ok, _} = dbg:p(self(), [call]),
+ {ok, _} = dbg:p(new, [call]),
+ {ok, _} = dbg:tp(?MODULE, dummy, []),
+ ?MODULE:dummy(),
+ receive {'DOWN', MRef, _, _, normal} -> ok end,
+ [{trace,_,call,{?MODULE, dummy,[]}},
+ {trace,_,call,{?MODULE, dummy,[]}}] = flush(),
+ ok.
+
+spawn_once_handler(Event, {Pid, done} = State) ->
+ Pid ! Event,
+ State;
+spawn_once_handler(Event, {Pid, Fun}) ->
+ {_, Ref} = spawn_monitor(Fun),
+ receive
+ {'DOWN', Ref, _, _, _} ->
+ Pid ! Event,
+ {Pid, done}
+ end.
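
For context, a hedged sketch of the general dbg pattern the new test exercises, a process tracer driven by a handler fun (the traced function is arbitrary and the handler is a placeholder):

    HandlerFun = fun(TraceMsg, Acc) -> io:format("~p~n", [TraceMsg]), Acc end,
    {ok, _} = dbg:tracer(process, {HandlerFun, []}),
    {ok, _} = dbg:p(self(), [call]),
    {ok, _} = dbg:tp(lists, seq, 2, []),
    lists:seq(1, 3),
    dbg:stop_clear().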
%%
%% Support functions
diff --git a/lib/sasl/test/release_handler_SUITE.erl b/lib/sasl/test/release_handler_SUITE.erl
index bd7414fbb4..b7c5f34f58 100644
--- a/lib/sasl/test/release_handler_SUITE.erl
+++ b/lib/sasl/test/release_handler_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2011-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2011-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -1802,11 +1802,17 @@ upgrade_gg(cleanup,Config) ->
%%%-----------------------------------------------------------------
%%% OTP-10463, Bug - release_handler could not handle regexp in appup
%%% files.
-otp_10463_upgrade_script_regexp(_Config) ->
- %% Assuming that kernel always has a regexp in it's appup
- KernelVsn = vsn(kernel,current),
- {ok,KernelVsn,_} =
- release_handler:upgrade_script(kernel,code:lib_dir(kernel)),
+otp_10463_upgrade_script_regexp(Config) ->
+ DataDir = ?config(data_dir,Config),
+ code:add_path(filename:join([DataDir,regexp_appup,app1,ebin])),
+ application:start(app1),
+ {ok,"1.1",_} = release_handler:upgrade_script(app1,code:lib_dir(app1)),
+ ok.
+
+otp_10463_upgrade_script_regexp(cleanup,Config) ->
+ DataDir = ?config(data_dir,Config),
+ application:stop(app1),
+ code:del_path(filename:join([DataDir,regexp_appup,app1,ebin])),
ok.
no_dot_erlang(Conf) ->
diff --git a/lib/sasl/test/release_handler_SUITE_data/regexp_appup/app1/ebin/app1.app b/lib/sasl/test/release_handler_SUITE_data/regexp_appup/app1/ebin/app1.app
new file mode 100644
index 0000000000..ba6d09cd42
--- /dev/null
+++ b/lib/sasl/test/release_handler_SUITE_data/regexp_appup/app1/ebin/app1.app
@@ -0,0 +1,29 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%% This is an -*- erlang -*- file.
+%%
+{application, app1,
+ [
+ {description, "Test that release_handler can read appup with regexp"},
+ {vsn, "1.1"},
+ {modules, []},
+ {registered, []},
+ {applications, []}
+ ]
+}.
diff --git a/lib/sasl/test/release_handler_SUITE_data/regexp_appup/app1/ebin/app1.appup b/lib/sasl/test/release_handler_SUITE_data/regexp_appup/app1/ebin/app1.appup
new file mode 100644
index 0000000000..9c657232d0
--- /dev/null
+++ b/lib/sasl/test/release_handler_SUITE_data/regexp_appup/app1/ebin/app1.appup
@@ -0,0 +1,23 @@
+%% -*- erlang -*-
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+{"1.1",
+ %% Up from
+ [{<<"1(\\.[0-9]+)*">>,[{restart_application,app1}]}],
+ %% Down to
+ [{<<"1(\\.[0-9]+)*">>,[{restart_application,app1}]}]
+}.
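
The appup key above is interpreted as a regular expression, so any 1, 1.x or 1.x.y version string selects the restart_application instruction. A hedged illustration using re:run/3 directly (the version string is arbitrary):

    match = re:run("1.0.2", <<"1(\\.[0-9]+)*">>, [{capture, none}]).
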
diff --git a/lib/ssh/doc/src/ssh.xml b/lib/ssh/doc/src/ssh.xml
index 9f5d1c003d..d481a75c9a 100644
--- a/lib/ssh/doc/src/ssh.xml
+++ b/lib/ssh/doc/src/ssh.xml
@@ -234,11 +234,11 @@
<taglist>
<tag><c><![CDATA[{inet, inet | inet6}]]></c></tag>
<item> IP version to use when the host address is specified as <c>any</c>. </item>
- <tag><c><![CDATA[{subsystems, [subsystem_spec()]]]></c></tag>
+ <tag><c><![CDATA[{subsystems, [subsystem_spec()]}]]></c></tag>
<item>
Provides specifications for handling of subsystems. The
"sftp" subsystem spec can be retrieved by calling
- ssh_sftpd:subsystem_spec/1. If the subsystems option in
+ ssh_sftpd:subsystem_spec/1. If the subsystems option is
not present the value of
<c>[ssh_sftpd:subsystem_spec([])]</c> will be used. It is
of course possible to set the option to the empty list if
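
As a hedged illustration of the corrected subsystems option (the port and directories below are invented for the example), a daemon that sets the option explicitly, mirroring the documented default, could be started as:

    {ok, _Daemon} =
        ssh:daemon(8022, [{system_dir, "/tmp/ssh_daemon"},
                          {subsystems, [ssh_sftpd:subsystem_spec([{cwd, "/tmp"}])]}]).
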
diff --git a/lib/ssh/doc/src/using_ssh.xml b/lib/ssh/doc/src/using_ssh.xml
index 9ab71260d3..46178d4018 100644
--- a/lib/ssh/doc/src/using_ssh.xml
+++ b/lib/ssh/doc/src/using_ssh.xml
@@ -79,7 +79,7 @@
<p> The option user_dir defaults to the users ~/.ssh directory</p>
<p>In the following example we generate new keys and host keys as
- to be able to run the example without having root privilages</p>
+ to be able to run the example without having root privileges</p>
<code>
$bash> ssh-keygen -t rsa -f /tmp/ssh_daemon/ssh_host_rsa_key
diff --git a/lib/ssh/src/ssh_connection_handler.erl b/lib/ssh/src/ssh_connection_handler.erl
index 915060c426..68523aa72b 100644
--- a/lib/ssh/src/ssh_connection_handler.erl
+++ b/lib/ssh/src/ssh_connection_handler.erl
@@ -1711,7 +1711,7 @@ handshake(Pid, Ref, Timeout) ->
{error, Reason}
after Timeout ->
stop(Pid),
- {error, Timeout}
+ {error, timeout}
end.
start_timeout(_,_, infinity) ->
diff --git a/lib/ssh/src/ssh_info.erl b/lib/ssh/src/ssh_info.erl
index 9ed598b3ab..e5a8666af0 100644
--- a/lib/ssh/src/ssh_info.erl
+++ b/lib/ssh/src/ssh_info.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -179,7 +179,14 @@ line(Len, Char) ->
datetime() ->
- {{YYYY,MM,DD}, {H,M,S}} = calendar:now_to_universal_time(now()),
+ %% Adapt to new OTP 18 erlang time API and be back-compatible
+ TimeStamp = try
+ erlang:timestamp()
+ catch
+ error:undef ->
+ erlang:now()
+ end,
+ {{YYYY,MM,DD}, {H,M,S}} = calendar:now_to_universal_time(TimeStamp),
lists:flatten(io_lib:format('~4w-~2..0w-~2..0w ~2..0w:~2..0w:~2..0w UTC',[YYYY,MM,DD, H,M,S])).
diff --git a/lib/ssh/src/ssh_sftpd.erl b/lib/ssh/src/ssh_sftpd.erl
index 52665635f0..04ae6b11e2 100644
--- a/lib/ssh/src/ssh_sftpd.erl
+++ b/lib/ssh/src/ssh_sftpd.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2005-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2005-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -559,56 +559,73 @@ stat(ReqId, RelPath, State0=#state{file_handler=FileMod,
send_status({error, E}, ReqId, State1)
end.
-decode_4_open_flag(create_new) ->
- [write];
-decode_4_open_flag(create_truncate) ->
- [write];
-decode_4_open_flag(truncate_existing) ->
- [write];
-decode_4_open_flag(open_existing) ->
- [read].
-
-decode_4_flags([OpenFlag | Flags]) ->
- decode_4_flags(Flags, decode_4_open_flag(OpenFlag)).
-
-decode_4_flags([], Flags) ->
- Flags;
-decode_4_flags([append_data|R], _Flags) ->
- decode_4_flags(R, [append]);
-decode_4_flags([append_data_atomic|R], _Flags) ->
- decode_4_flags(R, [append]);
-decode_4_flags([_|R], Flags) ->
- decode_4_flags(R, Flags).
-
-decode_4_access_flag(read_data) ->
- [read];
-decode_4_access_flag(list_directory) ->
- [read];
-decode_4_access_flag(write_data) ->
- [write];
-decode_4_access_flag(add_file) ->
- [write];
-decode_4_access_flag(add_subdirectory) ->
- [read];
-decode_4_access_flag(append_data) ->
- [append];
-decode_4_access_flag(write_attributes) ->
- [write];
-decode_4_access_flag(_) ->
- [read].
-
-decode_4_acess([_ | _] = Flags) ->
+sftp_to_erlang_flag(read, Vsn) when Vsn == 3;
+ Vsn == 4 ->
+ read;
+sftp_to_erlang_flag(write, Vsn) when Vsn == 3;
+ Vsn == 4 ->
+ write;
+sftp_to_erlang_flag(append, Vsn) when Vsn == 3;
+ Vsn == 4 ->
+ append;
+sftp_to_erlang_flag(creat, Vsn) when Vsn == 3;
+ Vsn == 4 ->
+ write;
+sftp_to_erlang_flag(trunc, Vsn) when Vsn == 3;
+ Vsn == 4 ->
+ write;
+sftp_to_erlang_flag(excl, Vsn) when Vsn == 3;
+ Vsn == 4 ->
+ read;
+sftp_to_erlang_flag(create_new, Vsn) when Vsn > 4 ->
+ write;
+sftp_to_erlang_flag(create_truncate, Vsn) when Vsn > 4 ->
+ write;
+sftp_to_erlang_flag(open_existing, Vsn) when Vsn > 4 ->
+ read;
+sftp_to_erlang_flag(open_or_create, Vsn) when Vsn > 4 ->
+ write;
+sftp_to_erlang_flag(truncate_existing, Vsn) when Vsn > 4 ->
+ write;
+sftp_to_erlang_flag(append_data, Vsn) when Vsn > 4 ->
+ append;
+sftp_to_erlang_flag(append_data_atomic, Vsn) when Vsn > 4 ->
+ append;
+sftp_to_erlang_flag(_, _) ->
+ read.
+
+sftp_to_erlang_flags(Flags, Vsn) ->
lists:map(fun(Flag) ->
- [decode_4_access_flag(Flag)]
- end, Flags);
-decode_4_acess([]) ->
- [].
+ sftp_to_erlang_flag(Flag, Vsn)
+ end, Flags).
+
+sftp_to_erlang_access_flag(read_data, _) ->
+ read;
+sftp_to_erlang_access_flag(list_directory, _) ->
+ read;
+sftp_to_erlang_access_flag(write_data, _) ->
+ write;
+sftp_to_erlang_access_flag(append_data, _) ->
+ append;
+sftp_to_erlang_access_flag(add_subdirectory, _) ->
+ read;
+sftp_to_erlang_access_flag(add_file, _) ->
+ write;
+sftp_to_erlang_access_flag(write_attributes, _) ->
+ write;
+sftp_to_erlang_access_flag(_, _) ->
+ read.
+sftp_to_erlang_access_flags(Flags, Vsn) ->
+ lists:map(fun(Flag) ->
+ sftp_to_erlang_access_flag(Flag, Vsn)
+ end, Flags).
open(Vsn, ReqId, Data, State) when Vsn =< 3 ->
<<?UINT32(BLen), BPath:BLen/binary, ?UINT32(PFlags),
_Attrs/binary>> = Data,
Path = unicode:characters_to_list(BPath),
- Flags = ssh_xfer:decode_open_flags(Vsn, PFlags),
+ FlagBits = ssh_xfer:decode_open_flags(Vsn, PFlags),
+ Flags = lists:usort(sftp_to_erlang_flags(FlagBits, Vsn)),
do_open(ReqId, State, Path, Flags);
open(Vsn, ReqId, Data, State) when Vsn >= 4 ->
<<?UINT32(BLen), BPath:BLen/binary, ?UINT32(Access),
@@ -616,15 +633,12 @@ open(Vsn, ReqId, Data, State) when Vsn >= 4 ->
Path = unicode:characters_to_list(BPath),
FlagBits = ssh_xfer:decode_open_flags(Vsn, PFlags),
AcessBits = ssh_xfer:decode_ace_mask(Access),
- %% TODO: This is to make sure the Access flags are not ignored
- %% but this should be thought through better. This solution should
- %% be considered a hack in order to buy some time. At least
- %% it works better than when the Access flags where totally ignored.
- %% A better solution may need some code refactoring that we do
- %% not have time for right now.
- AcessFlags = decode_4_acess(AcessBits),
- Flags = lists:append(lists:umerge(
- [[decode_4_flags(FlagBits)] | AcessFlags])),
+ %% TODO: There are still flags that are not
+ %% fully handled, such as SSH_FXF_ACCESS_TEXT_MODE and
+ %% a lot of the ACE flags; the latter we may not need
+ %% to understand as they are NFS flags.
+ AcessFlags = sftp_to_erlang_access_flags(AcessBits, Vsn),
+ Flags = lists:usort(sftp_to_erlang_flags(FlagBits, Vsn) ++ AcessFlags),
do_open(ReqId, State, Path, Flags).
do_open(ReqId, State0, Path, Flags) ->
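
A hedged illustration of the mapping introduced above (internal helper names as in the patch, shown only to make the lists:usort/1 de-duplication concrete): an SFTP v3 open carrying the creat and trunc bits reduces to a plain read/write mode list.

    Vsn = 3,
    FlagBits = [read, write, creat, trunc],
    [read, write] = lists:usort([sftp_to_erlang_flag(F, Vsn) || F <- FlagBits]).
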
diff --git a/lib/ssh/test/ssh_basic_SUITE.erl b/lib/ssh/test/ssh_basic_SUITE.erl
index 415cb9fc9c..b449012ffc 100644
--- a/lib/ssh/test/ssh_basic_SUITE.erl
+++ b/lib/ssh/test/ssh_basic_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -715,7 +715,14 @@ ssh_connect_arg4_timeout(_Config) ->
%% try to connect with a timeout, but "supervise" it
Client = spawn(fun() ->
- T0 = now(),
+ %% Adapt to OTP 18 erlang time API and be back-compatible
+ T0 = try
+ erlang:monotonic_time()
+ catch
+ error:undef ->
+ %% Use Erlang system time as monotonic time
+ erlang:now()
+ end,
Rc = ssh:connect("localhost",Port,[],Timeout),
ct:log("Client ssh:connect got ~p",[Rc]),
Parent ! {done,self(),Rc,T0}
@@ -723,16 +730,22 @@ ssh_connect_arg4_timeout(_Config) ->
%% Wait for client reaction on the connection try:
receive
- {done, Client, {error,_E}, T0} ->
- Msp = ms_passed(T0, now()),
+ {done, Client, {error,timeout}, T0} ->
+ Msp = ms_passed(T0),
exit(Server,hasta_la_vista___baby),
Low = 0.9*Timeout,
High = 1.1*Timeout,
- ct:log("Timeout limits: ~p--~p, timeout was ~p, expected ~p",[Low,High,Msp,Timeout]),
+ ct:log("Timeout limits: ~.4f - ~.4f ms, timeout "
+ "was ~.4f ms, expected ~p ms",[Low,High,Msp,Timeout]),
if
Low<Msp, Msp<High -> ok;
true -> {fail, "timeout not within limits"}
end;
+
+ {done, Client, {error,Other}, _T0} ->
+ ct:log("Error message \"~p\" from the client is unexpected.",[{error,Other}]),
+ {fail, "Unexpected error message"};
+
{done, Client, {ok,_Ref}, _T0} ->
{fail,"ssh-connected ???"}
after
@@ -743,12 +756,16 @@ ssh_connect_arg4_timeout(_Config) ->
end.
-%% Help function
-%% N2-N1
-ms_passed(N1={_,_,M1}, N2={_,_,M2}) ->
- {0,{0,Min,Sec}} = calendar:time_difference(calendar:now_to_local_time(N1),
- calendar:now_to_local_time(N2)),
- 1000 * (Min*60 + Sec + (M2-M1)/1000000).
+%% Help function, elapsed milliseconds since T0
+ms_passed({_,_,_} = T0 ) ->
+ %% OTP 17 and earlier
+ timer:now_diff(erlang:now(), T0)/1000;
+
+ms_passed(T0) ->
+ %% OTP 18
+ erlang:convert_time_resolution(erlang:monotonic_time() - T0,
+ erlang:time_resolution(),
+ 1000000)/1000.
%%--------------------------------------------------------------------
ssh_connect_negtimeout_parallel(Config) -> ssh_connect_negtimeout(Config,true).
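
For comparison, a hedged sketch of an equivalent elapsed-time helper using the erlang:convert_time_unit/3 name exported by final OTP 18 builds; which conversion function is available depends on the emulator in use.

    T0 = erlang:monotonic_time(),
    %% ... the operation being timed ...
    ElapsedMs = erlang:convert_time_unit(erlang:monotonic_time() - T0,
                                         native, milli_seconds).
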
diff --git a/lib/ssh/test/ssh_sftpd_SUITE.erl b/lib/ssh/test/ssh_sftpd_SUITE.erl
index 7b22e45d5e..0ce8eec906 100644
--- a/lib/ssh/test/ssh_sftpd_SUITE.erl
+++ b/lib/ssh/test/ssh_sftpd_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -56,7 +56,8 @@ all() ->
retrieve_attributes,
set_attributes,
links,
- ver3_rename,
+ ver3_rename,
+ ver3_open_flags,
relpath,
sshd_read_file,
ver6_basic].
@@ -193,6 +194,39 @@ open_close_file(Config) when is_list(Config) ->
?ACE4_READ_DATA bor ?ACE4_READ_ATTRIBUTES,
?SSH_FXF_OPEN_EXISTING).
+ver3_open_flags() ->
+ [{doc, "Test open flags"}].
+ver3_open_flags(Config) when is_list(Config) ->
+ PrivDir = ?config(priv_dir, Config),
+ FileName = filename:join(PrivDir, "not_exist.txt"),
+ {Cm, Channel} = ?config(sftp, Config),
+ ReqId = 0,
+
+ {ok, <<?SSH_FXP_HANDLE, ?UINT32(ReqId), Handle/binary>>, _} =
+ open_file_v3(FileName, Cm, Channel, ReqId,
+ ?SSH_FXF_CREAT bor ?SSH_FXF_TRUNC),
+ {ok, <<?SSH_FXP_STATUS, ?UINT32(ReqId),
+ ?UINT32(?SSH_FX_OK), _/binary>>, _} = close(Handle, ReqId,
+ Cm, Channel),
+
+ NewFileName = filename:join(PrivDir, "not_exist2.txt"),
+ NewReqId = ReqId + 1,
+ {ok, <<?SSH_FXP_HANDLE, ?UINT32(NewReqId), NewHandle/binary>>, _} =
+ open_file_v3(NewFileName, Cm, Channel, NewReqId,
+ ?SSH_FXF_CREAT bor ?SSH_FXF_EXCL),
+ {ok, <<?SSH_FXP_STATUS, ?UINT32(NewReqId),
+ ?UINT32(?SSH_FX_OK), _/binary>>, _} = close(NewHandle, NewReqId,
+ Cm, Channel),
+
+ NewFileName1 = filename:join(PrivDir, "test.txt"),
+ NewReqId1 = NewReqId + 1,
+ {ok, <<?SSH_FXP_HANDLE, ?UINT32(NewReqId1), NewHandle1/binary>>, _} =
+ open_file_v3(NewFileName1, Cm, Channel, NewReqId1,
+ ?SSH_FXF_READ bor ?SSH_FXF_WRITE bor ?SSH_FXF_APPEND),
+ {ok, <<?SSH_FXP_STATUS, ?UINT32(NewReqId1),
+ ?UINT32(?SSH_FX_OK), _/binary>>, _} = close(NewHandle1, NewReqId1,
+ Cm, Channel).
+
%%--------------------------------------------------------------------
open_close_dir() ->
[{doc,"Test SSH_FXP_OPENDIR and SSH_FXP_CLOSE commands"}].
@@ -662,6 +696,16 @@ open_file(File, Cm, Channel, ReqId, Access, Flags) ->
?SSH_FXP_OPEN, Data/binary>>),
reply(Cm, Channel).
+open_file_v3(File, Cm, Channel, ReqId, Flags) ->
+
+ Data = list_to_binary([?uint32(ReqId),
+ ?binary(list_to_binary(File)),
+ ?uint32(Flags),
+ ?REG_ATTERS]),
+ Size = 1 + size(Data),
+ ssh_connection:send(Cm, Channel, <<?UINT32(Size),
+ ?SSH_FXP_OPEN, Data/binary>>),
+ reply(Cm, Channel).
close(Handle, ReqId, Cm , Channel) ->
diff --git a/lib/ssl/doc/src/Makefile b/lib/ssl/doc/src/Makefile
index fb12499ef7..cfbf98f6e3 100644
--- a/lib/ssl/doc/src/Makefile
+++ b/lib/ssl/doc/src/Makefile
@@ -1,7 +1,7 @@
#
# %CopyrightBegin%
#
-# Copyright Ericsson AB 1999-2012. All Rights Reserved.
+# Copyright Ericsson AB 1999-2015. All Rights Reserved.
#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
@@ -37,7 +37,7 @@ RELSYSDIR = $(RELEASE_PATH)/lib/$(APPLICATION)-$(VSN)
# Target Specs
# ----------------------------------------------------
XML_APPLICATION_FILES = refman.xml
-XML_REF3_FILES = ssl.xml ssl_session_cache_api.xml
+XML_REF3_FILES = ssl.xml ssl_crl_cache.xml ssl_crl_cache_api.xml ssl_session_cache_api.xml
XML_REF6_FILES = ssl_app.xml
XML_PART_FILES = release_notes.xml usersguide.xml
diff --git a/lib/ssl/doc/src/refman.xml b/lib/ssl/doc/src/refman.xml
index ae11198edb..d5f2219af9 100644
--- a/lib/ssl/doc/src/refman.xml
+++ b/lib/ssl/doc/src/refman.xml
@@ -4,7 +4,7 @@
<application xmlns:xi="http://www.w3.org/2001/XInclude">
<header>
<copyright>
- <year>1999</year><year>2013</year>
+ <year>1999</year><year>2015</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -28,23 +28,10 @@
<rev>B</rev>
<file>refman.sgml</file>
</header>
- <description>
- <p>The <em>SSL</em> application provides secure communication over
- sockets.
- </p>
- <p>This product includes software developed by the OpenSSL Project for
- use in the OpenSSL Toolkit (http://www.openssl.org/).
- </p>
- <p>This product includes cryptographic software written by Eric Young
- </p>
- <p>This product includes software written by Tim Hudson
- </p>
- <p>For full OpenSSL and SSLeay license texts, see <seealso marker="licenses#licenses">Licenses</seealso>.</p>
- </description>
<xi:include href="ssl_app.xml"/>
<xi:include href="ssl.xml"/>
+ <xi:include href="ssl_crl_cache.xml"/>
+ <xi:include href="ssl_crl_cache_api.xml"/>
<xi:include href="ssl_session_cache_api.xml"/>
</application>
diff --git a/lib/ssl/doc/src/ssl.xml b/lib/ssl/doc/src/ssl.xml
index 83e5ed82bb..c9b02d44ec 100644
--- a/lib/ssl/doc/src/ssl.xml
+++ b/lib/ssl/doc/src/ssl.xml
@@ -4,7 +4,7 @@
<erlref>
<header>
<copyright>
- <year>1999</year><year>2014</year>
+ <year>1999</year><year>2015</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -38,7 +38,9 @@
<item>ssl requires the crypto and public_key applications.</item>
<item>Supported SSL/TLS-versions are SSL-3.0, TLS-1.0,
TLS-1.1 and TLS-1.2.</item>
- <item>For security reasons sslv2 is not supported.</item>
+ <item>For security reasons SSL-2.0 is not supported.</item>
+ <item>For security reasons SSL-3.0 is no longer supported by default,
+ but may be configured.</item>
<item>Ephemeral Diffie-Hellman cipher suites are supported
but not Diffie Hellman Certificates cipher suites.</item>
<item>Elliptic Curve cipher suites are supported if crypto
@@ -49,9 +51,9 @@
<item>IDEA cipher suites are not supported as they have
become deprecated by the latest TLS spec so there is not any
real motivation to implement them.</item>
- <item>CRL and policy certificate extensions are not supported
- yet. However CRL verification is supported by public_key, only not integrated
- in ssl yet. </item>
+ <item>CRL validation is supported.</item>
+ <item>Policy certificate extensions are not supported
+ yet. </item>
<item>Support for 'Server Name Indication' extension client side
(RFC 6066 section 3).</item>
</list>
@@ -163,7 +165,7 @@
is supplied it will override the certfile option.</item>
<tag>{certfile, path()}</tag>
- <item>Path to a file containing the user's certificate.</item>
+ <item>Path to a file containing the user's PEM encoded certificate.</item>
<tag>{key, {'RSAPrivateKey'| 'DSAPrivateKey' | 'ECPrivateKey' |'PrivateKeyInfo', der_encoded()}}</tag>
<item> The DER encoded users private key. If this option
@@ -299,10 +301,47 @@ fun(OtpCert :: #'OTPCertificate'{}, Event :: {bad_cert, Reason :: atom() | {revo
<item> Possible such reasons see <seealso
marker="public_key:public_key#pkix_path_validation-3"> public_key:pkix_path_validation/3 </seealso></item>
</taglist>
+ </item>
+
+ <tag>{crl_check, boolean() | peer | best_effort}</tag>
+ <item>
+ Perform CRL (Certificate Revocation List) verification
+ <seealso marker="public_key:public_key#pkix_crls_validate-3">
+ public_key:pkix_crls_validate/3</seealso> during the
+ <seealso
+ marker="public_key:public_key#pkix_path_validation-3">public_key:pkix_path_validation/3 </seealso>
+ invocation on all the certificates in the peer certificate chain. Defaults to
+ false.
+ <p><c>peer</c> - check is only performed on
+ the peer certificate.</p>
+
+ <p><c>best_effort</c> - if the certificate revocation status cannot be determined,
+ it will be accepted as valid.</p>
+
+ <p>The CA certificates specified for the connection will be used to
+ construct the certificate chain validating the CRLs.</p>
+
+ <p>The CRLs will be fetched from a local or external cache
+ <seealso marker="ssl:ssl_crl_cache_api">ssl_crl_cache_api(3)</seealso>.</p>
</item>
+ <tag>{crl_cache, {Module::atom(), {DbHandle::internal | term(), Args::list()}}}</tag>
+ <item>
+ <p>Module defaults to ssl_crl_cache, with <c>DbHandle</c> being internal and an
+ empty argument list. The following arguments may be specified for the internal cache.</p>
+ <taglist>
+ <tag>{http, timeout()}</tag>
+ <item>
+ Enables fetching of CRLs specified as http URIs in <seealso
+ marker="public_key:cert_records">X.509 certificate extensions</seealso>.
+ Requires the OTP inets application.
+ </item>
+ </taglist>
+ </item>
+
+ <tag>{partial_chain, fun(Chain::[DerCert]) -> {trusted_ca, DerCert} | unknown_ca}</tag>
+
<item>
Claim an intermediate CA in the chain as trusted. TLS will then perform the public_key:pkix_path_validation/3
with the selected CA as trusted anchor and the rest of the chain.
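
A hedged usage sketch of the CRL options documented above (host, port and file name are hypothetical): best-effort CRL checking with the built-in cache, fetching CRLs over HTTP with a 5 second timeout.

    {ok, Socket} =
        ssl:connect("server.example.com", 443,
                    [{cacertfile, "cacerts.pem"},
                     {crl_check, best_effort},
                     {crl_cache, {ssl_crl_cache, {internal, [{http, 5000}]}}}]).
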
@@ -311,7 +350,7 @@ fun(OtpCert :: #'OTPCertificate'{}, Event :: {bad_cert, Reason :: atom() | {revo
<tag>{versions, [protocol()]}</tag>
<item>TLS protocol versions that will be supported by started clients and servers.
This option overrides the application environment option <c>protocol_version</c>. If the
- environment option is not set it defaults to all versions supported by the SSL application. See also
+ environment option is not set it defaults to all versions supported by the SSL application, except SSL-3.0. See also
<seealso marker="ssl:ssl_app">ssl(6)</seealso>
</item>
@@ -348,11 +387,23 @@ fun(srp, Username :: string(), UserState :: term()) ->
</p>
</item>
+ <tag>{padding_check, boolean()}</tag>
+ <item>
+ <p> This option only affects TLS-1.0 connections.
+ If set to false it disables the block cipher padding check
+ to be able to interoperate with legacy software.
+ </p>
+
+ <warning><p> Using this option makes TLS vulnerable to
+ the Poodle attack</p></warning>
+
+ </item>
+
</taglist>
-
+
</section>
-
- <section>
+
+ <section>
<title>SSL OPTION DESCRIPTIONS - CLIENT SIDE</title>
<p>Options described here are client specific or has a slightly different
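
A hedged sketch of the padding_check option described above (the endpoint is hypothetical); given the POODLE warning, this is only for interoperating with broken legacy TLS-1.0 peers.

    {ok, Socket} = ssl:connect("legacy.example.com", 443,
                               [{versions, ['tlsv1']},
                                {padding_check, false}]).
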
@@ -413,6 +464,23 @@ fun(srp, Username :: string(), UserState :: term()) ->
Indication extension will be sent if possible, this option may also be
used to disable that behavior.</p>
</item>
+ <tag>{fallback, boolean()}</tag>
+ <item>
+ <p> Send special cipher suite TLS_FALLBACK_SCSV to avoid undesired TLS version downgrade.
+ Defaults to false.</p>
+ <warning><p>Note this option is not needed in normal TLS usage and should not be used
+ to implement new clients, but legacy clients that retry connections in the following manner</p>
+
+ <p><c> ssl:connect(Host, Port, [...{versions, ['tlsv1.2', 'tlsv1.1', 'tlsv1', 'sslv3']}])</c></p>
+ <p><c> ssl:connect(Host, Port, [...{versions, ['tlsv1.1', 'tlsv1', 'sslv3']}, {fallback, true}])</c></p>
+ <p><c> ssl:connect(Host, Port, [...{versions, ['tlsv1', 'sslv3']}, {fallback, true}]) </c></p>
+ <p><c> ssl:connect(Host, Port, [...{versions, ['sslv3']}, {fallback, true}]) </c></p>
+
+ <p>may use it to avoid undesired TLS version downgrade. Note that TLS_FALLBACK_SCSV must also
+ be supported by the server for the prevention to work.
+ </p></warning>
+ </item>
+
</taglist>
</section>
@@ -538,7 +606,19 @@ fun(srp, Username :: string(), UserState :: term()) ->
</p>
</desc>
</func>
-
+
+ <func>
+ <name>clear_pem_cache() -> ok </name>
+ <fsummary>Clears the PEM cache</fsummary>
+
+ <desc><p>PEM files used by the ssl API functions are cached. The
+ cache is regularly checked to see if any cache entries should be
+ invalidated; this function, however, provides a way to
+ unconditionally clear the whole cache.
+ </p>
+ </desc>
+ </func>
+
<func>
<name>connect(Socket, SslOptions) -> </name>
<name>connect(Socket, SslOptions, Timeout) -> {ok, SslSocket}
@@ -904,19 +984,37 @@ fun(srp, Username :: string(), UserState :: term()) ->
</func>
<func>
- <name>versions() ->
- [{SslAppVer, SupportedSslVer, AvailableSslVsn}]</name>
+ <name>versions() -> [versions_info()]</name>
<fsummary>Returns version information relevant for the
ssl application.</fsummary>
<type>
- <v>SslAppVer = string()</v>
- <v>SupportedSslVer = [protocol()]</v>
- <v>AvailableSslVsn = [protocol()]</v>
+ <v>versions_info() = {app_vsn, string()} | {supported | available, [protocol()]}</v>
</type>
<desc>
<p>
Returns version information relevant for the
- ssl application.</p>
+ ssl application.
+ </p>
+ <taglist>
+ <tag>app_vsn</tag>
+ <item> The application version of the OTP ssl application.</item>
+
+ <tag>supported</tag>
+
+ <item>TLS/SSL versions supported by default.
+ Overridden by a versions option on
+ <seealso marker="#connect-2"> connect/[2,3,4]</seealso>, <seealso
+ marker="#listen-2"> listen/2</seealso> and <seealso
+ marker="#ssl_accept-2">ssl_accept/[1,2,3]</seealso>. For the
+ negotiated TLS/SSL version see <seealso
+ marker="#connection_info-1">ssl:connection_info/1
+ </seealso></item>
+
+ <tag>available</tag>
+ <item>All TLS/SSL versions that the Erlang ssl application
+ can support. Note that TLS 1.2 requires sufficient support
+ from the crypto application. </item>
+ </taglist>
</desc>
</func>
<func>
diff --git a/lib/ssl/doc/src/ssl_app.xml b/lib/ssl/doc/src/ssl_app.xml
index c8024548b5..e3a3fc27f2 100644
--- a/lib/ssl/doc/src/ssl_app.xml
+++ b/lib/ssl/doc/src/ssl_app.xml
@@ -4,7 +4,7 @@
<appref>
<header>
<copyright>
- <year>1999</year><year>2014</year>
+ <year>1999</year><year>2015</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -82,7 +82,16 @@
callback module, defaults to [].
</p>
</item>
-
+
+ <tag><c><![CDATA[ssl_pem_cache_clean = integer() <optional>]]></c></tag>
+ <item>
+ <p>
+ Number of milliseconds between PEM cache validations.
+ </p>
+ <seealso
+ marker="ssl#clear_pem_cache-0">ssl:clear_pem_cache/0</seealso>
+
+ </item>
</taglist>
</section>
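
A hedged sketch of the new PEM cache controls (the interval is an arbitrary example value and would normally be set before the ssl application is started):

    application:set_env(ssl, ssl_pem_cache_clean, 120000), %% two minutes
    ok = ssl:clear_pem_cache().
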
diff --git a/lib/ssl/doc/src/ssl_crl_cache.xml b/lib/ssl/doc/src/ssl_crl_cache.xml
new file mode 100644
index 0000000000..1ed76d3e2a
--- /dev/null
+++ b/lib/ssl/doc/src/ssl_crl_cache.xml
@@ -0,0 +1,66 @@
+<?xml version="1.0" encoding="utf-8" ?>
+<!DOCTYPE erlref SYSTEM "erlref.dtd">
+
+<erlref>
+ <header>
+ <copyright>
+ <year>2015</year><year>2015</year>
+ <holder>Ericsson AB. All Rights Reserved.</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved online at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+ </legalnotice>
+ <title>ssl_crl_cache</title>
+ <file>ssl_crl_cache.xml</file>
+ </header>
+
+ <module>ssl_crl_cache</module>
+ <modulesummary>CRL cache </modulesummary>
+ <description>
+ <p>
+ Implements an internal CRL (Certificate Revocation List) cache.
+ In addition to implementing the <seealso
+ marker="ssl_cache_crl_api"> ssl_cache_crl_api</seealso>
+ the following functions are available.
+ </p>
+ </description>
+
+ <funcs>
+ <func>
+ <name>insert(CRLSrc) -> ok | {error, Reason}</name>
+ <name>insert(URI, CRLSrc) -> ok | {error, Reason}</name>
+ <fsummary>Insert CRLs into the cache</fsummary>
+ <type>
+ <v> CRLSrc = {file, string()} | {der, [ <seealso
+ marker="public_key:public_key"> der_encoded() </seealso> ]}</v>
+ <v> URI = http_uri:uri()</v>
+ <v> Reason = term()</v>
+ </type>
+ <desc>
+ Insert CRLs into the ssl application's local cache.
+ </desc>
+ </func>
+
+ <func>
+ <name>delete(Entries) -> ok | {error, Reason} </name>
+ <fsummary>Delete CRLs from the cache</fsummary>
+ <type>
+ <v> Entries = http_uri:uri() | {file, string()} | {der, [<seealso
+ marker="public_key:public_key"> der_encoded() </seealso>]}</v>
+ <v> Reason = term()</v>
+ </type>
+ <desc>
+ Delete CRLs from the ssl application's local cache.
+ </desc>
+ </func>
+ </funcs>
+</erlref> \ No newline at end of file
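
A hedged usage sketch for the functions above (the file name and URI are made up, and the CRL file is assumed to be PEM encoded):

    {ok, PemBin} = file:read_file("ca.crl.pem"),
    CRLs = [DER || {'CertificateList', DER, not_encrypted} <- public_key:pem_decode(PemBin)],
    ok = ssl_crl_cache:insert({der, CRLs}),
    ok = ssl_crl_cache:insert("http://crl.example.com/ca.crl", {der, CRLs}),
    ok = ssl_crl_cache:delete({der, CRLs}).
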
diff --git a/lib/ssl/doc/src/ssl_crl_cache_api.xml b/lib/ssl/doc/src/ssl_crl_cache_api.xml
new file mode 100644
index 0000000000..24365c9f59
--- /dev/null
+++ b/lib/ssl/doc/src/ssl_crl_cache_api.xml
@@ -0,0 +1,97 @@
+<?xml version="1.0" encoding="utf-8" ?>
+<!DOCTYPE erlref SYSTEM "erlref.dtd">
+
+<erlref>
+ <header>
+ <copyright>
+ <year>2015</year><year>2015</year>
+ <holder>Ericsson AB. All Rights Reserved.</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved online at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+ </legalnotice>
+ <title>ssl_crl_cache_api</title>
+ <file>ssl_crl_cache_api.xml</file>
+ </header>
+
+ <module>ssl_crl_cache_api</module>
+ <modulesummary>API for an SSL/TLS CRL (Certificate Revocation List) cache.</modulesummary>
+ <description>
+ <p>
+ When SSL/TLS performs certificate path validation according to
+ <url href="http://www.ietf.org/rfc/rfc5280.txt">RFC 5280 </url> it should
+ also perform CRL validation checks. To enable the CRL checks the application
+ needs access to CRLs. A database of CRLs can be set up in many different ways.
+ This module provides an API to integrate an arbitrary CRL cache with the Erlang
+ ssl application. It is also used by the application itself to provide a simple
+ default implementation of a CRL cache.
+ </p>
+ </description>
+
+ <section>
+ <title>Common Data Types</title>
+
+ <p>The following data types are used in the functions below:
+ </p>
+
+ <p><c>cache_ref() = opaque()</c></p>
+ <p><c>dist_point() = #'DistributionPoint'{}</c>, see <seealso
+ marker="public_key:cert_records">X.509 certificate records</seealso></p>
+ </section>
+
+ <funcs>
+ <func>
+ <name>lookup(DistributionPoint, DbHandle) -> not_available | CRLs </name>
+ <fsummary>Lookup the CRLs belonging to a distribution point</fsummary>
+ <type>
+ <v> DistributionPoint = dist_point() </v>
+ <v> DbHandle = cache_ref() </v>
+ <v> CRLs = [<seealso
+ marker="public_key:public_key">public_key:der_encoded()</seealso>] </v>
+ </type>
+ <desc> <p>Lookup the CRLs belonging to the distribution point <c>DistributionPoint</c>.
+ This function may choose to only look in the cache or to follow distribution point
+ links depending on how the cache is administered.</p>
+ </desc>
+ </func>
+
+ <func>
+ <name>select(Issuer, DbHandle) -> CRLs </name>
+ <fsummary>Select the CRLs in the cache that are issued by <c>Issuer</c></fsummary>
+ <type>
+ <v> Issuer = <seealso
+ marker="public_key:public_key">public_key:issuer_name()</seealso></v>
+ <v> DbHandle = cache_ref() </v>
+ </type>
+ <desc>
+ <p>Select the CRLs in the cache that are issued by <c>Issuer</c> </p>
+ </desc>
+ </func>
+
+ <func>
+ <name>fresh_crl(DistributionPoint, CRL) -> FreshCRL</name>
+ <fsummary> <c>fun fresh_crl/2 </c> will be used as input option <c>update_crl</c> to
+ public_key:pkix_crls_validate/3 </fsummary>
+ <type>
+ <v> DistributionPoint = dist_point() </v>
+ <v> CRL = [<seealso
+ marker="public_key:public_key">public_key:der_encoded()</seealso>] </v>
+ <v> FreshCRL = [<seealso
+ marker="public_key:public_key">public_key:der_encoded()</seealso>] </v>
+ </type>
+ <desc>
+ <p> <c>fun fresh_crl/2 </c> will be used as input option <c>update_crl</c> to
+ <seealso marker="public_key#pkix_path_validation-3">public_key:pkix_crls_validate/3 </seealso> </p>
+ </desc>
+ </func>
+ </funcs>
+</erlref> \ No newline at end of file
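
A hedged sketch of a cache callback module implementing the three functions above; the module name, the ETS table handle and its key layout are assumptions of this example, not part of the API.

    -module(my_crl_cache).
    -behaviour(ssl_crl_cache_api).

    -include_lib("public_key/include/public_key.hrl").

    -export([lookup/2, select/2, fresh_crl/2]).

    %% Db is the DbHandle configured via the crl_cache option; this sketch
    %% assumes an ETS table keyed on distribution point name and issuer name.
    lookup(#'DistributionPoint'{distributionPoint = {fullName, Names}}, Db) ->
        case ets:lookup(Db, {dp, Names}) of
            []          -> not_available;
            [{_, CRLs}] -> CRLs
        end.

    select(Issuer, Db) ->
        case ets:lookup(Db, {issuer, Issuer}) of
            []          -> [];
            [{_, CRLs}] -> CRLs
        end.

    %% No refreshing in this sketch; the cached CRLs are handed back unchanged.
    fresh_crl(_DistributionPoint, CRL) ->
        CRL.
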
diff --git a/lib/ssl/doc/src/ssl_session_cache_api.xml b/lib/ssl/doc/src/ssl_session_cache_api.xml
index cb97bbfbb2..9f87d31e90 100644
--- a/lib/ssl/doc/src/ssl_session_cache_api.xml
+++ b/lib/ssl/doc/src/ssl_session_cache_api.xml
@@ -4,7 +4,7 @@
<erlref>
<header>
<copyright>
- <year>1999</year><year>2014</year>
+ <year>1999</year><year>2015</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -119,14 +119,14 @@
<func>
<name>select_session(Cache, PartialKey) -> [session()]</name>
- <fsummary>>Selects sessions that could be reused.</fsummary>
+ <fsummary>Selects sessions that could be reused.</fsummary>
<type>
<v> Cache = cache_ref()</v>
<v> PartialKey = partialkey()</v>
<v> Session = session()</v>
</type>
<desc>
- <p>Selects sessions that could be reused. Should be callable
+ <p>Selects sessions that could be reused. Should be callable
from any process.
</p>
</desc>
diff --git a/lib/ssl/src/Makefile b/lib/ssl/src/Makefile
index 0c00a650b9..d71d3fc445 100644
--- a/lib/ssl/src/Makefile
+++ b/lib/ssl/src/Makefile
@@ -1,7 +1,7 @@
#
# %CopyrightBegin%
#
-# Copyright Ericsson AB 1999-2014. All Rights Reserved.
+# Copyright Ericsson AB 1999-2015. All Rights Reserved.
#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
@@ -38,7 +38,8 @@ RELSYSDIR = $(RELEASE_PATH)/lib/ssl-$(VSN)
# ----------------------------------------------------
BEHAVIOUR_MODULES= \
- ssl_session_cache_api
+ ssl_session_cache_api \
+ ssl_crl_cache_api
MODULES= \
ssl \
@@ -65,6 +66,8 @@ MODULES= \
ssl_manager \
ssl_session \
ssl_session_cache \
+ ssl_crl\
+ ssl_crl_cache \
ssl_socket \
ssl_listen_tracker_sup \
tls_record \
@@ -164,5 +167,5 @@ $(EBIN)/ssl_session_cache.$(EMULATOR): ssl_internal.hrl ssl_handshake.hrl
$(EBIN)/ssl_session_cache_api.$(EMULATOR): ssl_internal.hrl ssl_handshake.hrl
$(EBIN)/ssl_ssl3.$(EMULATOR): ssl_internal.hrl ssl_record.hrl ssl_cipher.hrl
$(EBIN)/ssl_tls1.$(EMULATOR): ssl_internal.hrl ssl_record.hrl ssl_cipher.hrl
-
+$(EBIN)/ssl_cache.$(EMULATOR): ssl_cache.erl ssl_internal.hrl ../../public_key/include/public_key.hrl
diff --git a/lib/ssl/src/dtls_record.erl b/lib/ssl/src/dtls_record.erl
index c0776e822b..59b3ddec5c 100644
--- a/lib/ssl/src/dtls_record.erl
+++ b/lib/ssl/src/dtls_record.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2013-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2013-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -194,7 +194,7 @@ decode_cipher_text(#ssl_tls{type = Type, version = Version,
compression_algorithm=CompAlg}
} = ReadState0}= ConnnectionStates0) ->
{PlainFragment, Mac, ReadState1} = ssl_record:decipher(dtls_v1:corresponding_tls_version(Version),
- CipherFragment, ReadState0),
+ CipherFragment, ReadState0, true),
MacHash = calc_mac_hash(ReadState1, Type, Version, Epoch, Seq, PlainFragment),
case ssl_record:is_correct_mac(Mac, MacHash) of
true ->
diff --git a/lib/ssl/src/ssl.app.src b/lib/ssl/src/ssl.app.src
index 36681e2897..955875fa95 100644
--- a/lib/ssl/src/ssl.app.src
+++ b/lib/ssl/src/ssl.app.src
@@ -39,6 +39,10 @@
ssl_manager,
ssl_pkix_db,
ssl_certificate,
+ %% CRL handling
+ ssl_crl,
+ ssl_crl_cache,
+ ssl_crl_cache_api,
%% App structure
ssl_app,
ssl_sup,
diff --git a/lib/ssl/src/ssl.appup.src b/lib/ssl/src/ssl.appup.src
index 7986722094..1476336039 100644
--- a/lib/ssl/src/ssl.appup.src
+++ b/lib/ssl/src/ssl.appup.src
@@ -1,14 +1,14 @@
%% -*- erlang -*-
{"%VSN%",
[
- {<<"5\\.3\\.[1-7]($|\\..*)">>, [{restart_application, ssl}]},
- {<<"5\\.[0-2]($|\\..*)">>, [{restart_application, ssl}]},
+ {<<"6\\..*">>, [{restart_application, ssl}]},
+ {<<"5\\..*">>, [{restart_application, ssl}]},
{<<"4\\..*">>, [{restart_application, ssl}]},
{<<"3\\..*">>, [{restart_application, ssl}]}
],
[
- {<<"5\\.3\\.[1-7]($|\\..*)">>, [{restart_application, ssl}]},
- {<<"5\\.[0-2]($|\\..*)">>, [{restart_application, ssl}]},
+ {<<"6\\..*">>, [{restart_application, ssl}]},
+ {<<"5\\..*">>, [{restart_application, ssl}]},
{<<"4\\..*">>, [{restart_application, ssl}]},
{<<"3\\..*">>, [{restart_application, ssl}]}
]
diff --git a/lib/ssl/src/ssl.erl b/lib/ssl/src/ssl.erl
index dcba69a65e..623fa92121 100644
--- a/lib/ssl/src/ssl.erl
+++ b/lib/ssl/src/ssl.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1999-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1999-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -353,12 +353,8 @@ cipher_suites(openssl) ->
|| S <- ssl_cipher:filter_suites(ssl_cipher:suites(Version))];
cipher_suites(all) ->
Version = tls_record:highest_protocol_version([]),
- Supported = ssl_cipher:all_suites(Version)
- ++ ssl_cipher:anonymous_suites(Version)
- ++ ssl_cipher:psk_suites(Version)
- ++ ssl_cipher:srp_suites(),
- ssl_cipher:filter_suites([suite_definition(S) || S <- Supported]).
-
+ ssl_cipher:filter_suites([suite_definition(S)
+ || S <-ssl_cipher:all_suites(Version)]).
cipher_suites() ->
cipher_suites(erlang).
@@ -454,7 +450,7 @@ session_info(#sslsocket{pid = {Listen,_}}) when is_port(Listen) ->
versions() ->
Vsns = tls_record:supported_protocol_versions(),
SupportedVsns = [tls_record:protocol_version(Vsn) || Vsn <- Vsns],
- AvailableVsns = ?ALL_SUPPORTED_VERSIONS,
+ AvailableVsns = ?ALL_AVAILABLE_VERSIONS,
%% TODO Add DTLS versions when supported
[{ssl_app, ?VSN}, {supported, SupportedVsns}, {available, AvailableVsns}].
@@ -656,7 +652,11 @@ handle_options(Opts0) ->
log_alert = handle_option(log_alert, Opts, true),
server_name_indication = handle_option(server_name_indication, Opts, undefined),
honor_cipher_order = handle_option(honor_cipher_order, Opts, false),
- protocol = proplists:get_value(protocol, Opts, tls)
+ protocol = proplists:get_value(protocol, Opts, tls),
+ padding_check = proplists:get_value(padding_check, Opts, true),
+ fallback = proplists:get_value(fallback, Opts, false),
+ crl_check = handle_option(crl_check, Opts, false),
+ crl_cache = handle_option(crl_cache, Opts, {ssl_crl_cache, {internal, []}})
},
CbInfo = proplists:get_value(cb_info, Opts, {gen_tcp, tcp, tcp_closed, tcp_error}),
@@ -669,7 +669,8 @@ handle_options(Opts0) ->
cb_info, renegotiate_at, secure_renegotiate, hibernate_after,
erl_dist, next_protocols_advertised,
client_preferred_next_protocols, log_alert,
- server_name_indication, honor_cipher_order],
+ server_name_indication, honor_cipher_order, padding_check, crl_check, crl_cache,
+ fallback],
SockOpts = lists:foldl(fun(Key, PropList) ->
proplists:delete(Key, PropList)
@@ -847,6 +848,16 @@ validate_option(server_name_indication, undefined) ->
undefined;
validate_option(honor_cipher_order, Value) when is_boolean(Value) ->
Value;
+validate_option(padding_check, Value) when is_boolean(Value) ->
+ Value;
+validate_option(fallback, Value) when is_boolean(Value) ->
+ Value;
+validate_option(crl_check, Value) when is_boolean(Value) ->
+ Value;
+validate_option(crl_check, Value) when (Value == best_effort) or (Value == peer) ->
+ Value;
+validate_option(crl_cache, {Cb, {_Handle, Options}} = Value) when is_atom(Cb) and is_list(Options) ->
+ Value;
validate_option(Opt, Value) ->
throw({error, {options, {Opt, Value}}}).
@@ -952,10 +963,7 @@ binary_cipher_suites(Version, [{_,_,_}| _] = Ciphers0) ->
binary_cipher_suites(Version, Ciphers);
binary_cipher_suites(Version, [Cipher0 | _] = Ciphers0) when is_binary(Cipher0) ->
- All = ssl_cipher:suites(Version)
- ++ ssl_cipher:anonymous_suites(Version)
- ++ ssl_cipher:psk_suites(Version)
- ++ ssl_cipher:srp_suites(),
+ All = ssl_cipher:all_suites(Version),
case [Cipher || Cipher <- Ciphers0, lists:member(Cipher, All)] of
[] ->
%% Defaults to all supported suites that does
@@ -1182,3 +1190,4 @@ handle_verify_options(Opts, CaCerts) ->
Value ->
throw({error, {options, {verify, Value}}})
end.
+
diff --git a/lib/ssl/src/ssl_alert.erl b/lib/ssl/src/ssl_alert.erl
index 78dc98bc25..9e372f739a 100644
--- a/lib/ssl/src/ssl_alert.erl
+++ b/lib/ssl/src/ssl_alert.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -161,5 +161,7 @@ description_txt(?BAD_CERTIFICATE_HASH_VALUE) ->
"bad certificate hash value";
description_txt(?UNKNOWN_PSK_IDENTITY) ->
"unknown psk identity";
+description_txt(?INAPPROPRIATE_FALLBACK) ->
+ "inappropriate fallback";
description_txt(Enum) ->
lists:flatten(io_lib:format("unsupported/unknown alert: ~p", [Enum])).
diff --git a/lib/ssl/src/ssl_alert.hrl b/lib/ssl/src/ssl_alert.hrl
index f4f1d74264..a3619e4a35 100644
--- a/lib/ssl/src/ssl_alert.hrl
+++ b/lib/ssl/src/ssl_alert.hrl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -58,6 +58,7 @@
%% protocol_version(70),
%% insufficient_security(71),
%% internal_error(80),
+%% inappropriate_fallback(86),
%% user_canceled(90),
%% no_renegotiation(100),
%% RFC 4366
@@ -93,6 +94,7 @@
-define(PROTOCOL_VERSION, 70).
-define(INSUFFICIENT_SECURITY, 71).
-define(INTERNAL_ERROR, 80).
+-define(INAPPROPRIATE_FALLBACK, 86).
-define(USER_CANCELED, 90).
-define(NO_RENEGOTIATION, 100).
-define(UNSUPPORTED_EXTENSION, 110).
diff --git a/lib/ssl/src/ssl_certificate.erl b/lib/ssl/src/ssl_certificate.erl
index 9c0ed181fe..764bd82de0 100644
--- a/lib/ssl/src/ssl_certificate.erl
+++ b/lib/ssl/src/ssl_certificate.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2014 All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015 All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -33,7 +33,8 @@
-export([trusted_cert_and_path/4,
certificate_chain/3,
file_to_certificats/2,
- validate_extension/3,
+ file_to_crls/2,
+ validate/3,
is_valid_extkey_usage/2,
is_valid_key_usage/2,
select_extension/2,
@@ -84,15 +85,18 @@ trusted_cert_and_path(CertChain, CertDbHandle, CertDbRef, PartialChainHandler) -
%%--------------------------------------------------------------------
-spec certificate_chain(undefined | binary(), db_handle(), certdb_ref()) ->
- {error, no_cert} | {ok, [der_cert()]}.
+ {error, no_cert} | {ok, #'OTPCertificate'{} | undefined, [der_cert()]}.
%%
%% Description: Return the certificate chain to send to peer.
%%--------------------------------------------------------------------
certificate_chain(undefined, _, _) ->
{error, no_cert};
-certificate_chain(OwnCert, CertDbHandle, CertsDbRef) ->
+certificate_chain(OwnCert, CertDbHandle, CertsDbRef) when is_binary(OwnCert) ->
ErlCert = public_key:pkix_decode_cert(OwnCert, otp),
- certificate_chain(ErlCert, OwnCert, CertDbHandle, CertsDbRef, [OwnCert]).
+ certificate_chain(ErlCert, OwnCert, CertDbHandle, CertsDbRef, [OwnCert]);
+certificate_chain(OwnCert, CertDbHandle, CertsDbRef) ->
+ DerCert = public_key:pkix_encode('OTPCertificate', OwnCert, otp),
+ certificate_chain(OwnCert, DerCert, CertDbHandle, CertsDbRef, [DerCert]).
%%--------------------------------------------------------------------
-spec file_to_certificats(binary(), term()) -> [der_cert()].
%%
@@ -101,29 +105,39 @@ certificate_chain(OwnCert, CertDbHandle, CertsDbRef) ->
file_to_certificats(File, DbHandle) ->
{ok, List} = ssl_manager:cache_pem_file(File, DbHandle),
[Bin || {'Certificate', Bin, not_encrypted} <- List].
+
%%--------------------------------------------------------------------
--spec validate_extension(term(), {extension, #'Extension'{}} | {bad_cert, atom()} | valid,
- term()) -> {valid, term()} |
- {fail, tuple()} |
- {unknown, term()}.
+-spec file_to_crls(binary(), term()) -> [der_cert()].
+%%
+%% Description: Return list of DER encoded certificates.
+%%--------------------------------------------------------------------
+file_to_crls(File, DbHandle) ->
+ {ok, List} = ssl_manager:cache_pem_file(File, DbHandle),
+ [Bin || {'CertificateList', Bin, not_encrypted} <- List].
+
+%%--------------------------------------------------------------------
+-spec validate(term(), {extension, #'Extension'{}} | {bad_cert, atom()} | valid,
+ term()) -> {valid, term()} |
+ {fail, tuple()} |
+ {unknown, term()}.
%%
%% Description: Validates ssl/tls specific extensions
%%--------------------------------------------------------------------
-validate_extension(_,{extension, #'Extension'{extnID = ?'id-ce-extKeyUsage',
- extnValue = KeyUse}}, Role) ->
+validate(_,{extension, #'Extension'{extnID = ?'id-ce-extKeyUsage',
+ extnValue = KeyUse}}, {Role, _,_, _, _}) ->
case is_valid_extkey_usage(KeyUse, Role) of
true ->
{valid, Role};
false ->
{fail, {bad_cert, invalid_ext_key_usage}}
end;
-validate_extension(_, {bad_cert, _} = Reason, _) ->
- {fail, Reason};
-validate_extension(_, {extension, _}, Role) ->
+validate(_, {extension, _}, Role) ->
{unknown, Role};
-validate_extension(_, valid, Role) ->
+validate(_, {bad_cert, _} = Reason, _) ->
+ {fail, Reason};
+validate(_, valid, Role) ->
{valid, Role};
-validate_extension(_, valid_peer, Role) ->
+validate(_, valid_peer, Role) ->
{valid, Role}.
%%--------------------------------------------------------------------
@@ -194,14 +208,14 @@ certificate_chain(OtpCert, _Cert, CertDbHandle, CertsDbRef, Chain) ->
%% certificate. The verification of the
%% cert chain will fail if guess is
%% incorrect.
- {ok, lists:reverse(Chain)}
+ {ok, undefined, lists:reverse(Chain)}
end;
{{ok, {SerialNr, Issuer}}, SelfSigned} ->
certificate_chain(CertDbHandle, CertsDbRef, Chain, SerialNr, Issuer, SelfSigned)
end.
-certificate_chain(_,_, Chain, _SerialNr, _Issuer, true) ->
- {ok, lists:reverse(Chain)};
+certificate_chain(_, _, [RootCert | _] = Chain, _, _, true) ->
+ {ok, RootCert, lists:reverse(Chain)};
certificate_chain(CertDbHandle, CertsDbRef, Chain, SerialNr, Issuer, _SelfSigned) ->
case ssl_manager:lookup_trusted_cert(CertDbHandle, CertsDbRef,
@@ -214,7 +228,7 @@ certificate_chain(CertDbHandle, CertsDbRef, Chain, SerialNr, Issuer, _SelfSigned
%% The trusted cert may be obmitted from the chain as the
%% counter part needs to have it anyway to be able to
%% verify it.
- {ok, lists:reverse(Chain)}
+ {ok, undefined, lists:reverse(Chain)}
end.
find_issuer(OtpCert, CertDbHandle) ->
@@ -282,7 +296,7 @@ other_issuer(OtpCert, CertDbHandle) ->
handle_path({BinCert, OTPCert}, Path, PartialChainHandler) ->
case public_key:pkix_is_self_signed(OTPCert) of
true ->
- {BinCert, Path};
+ {BinCert, lists:delete(BinCert, Path)};
false ->
handle_incomplete_chain(Path, PartialChainHandler)
end.
diff --git a/lib/ssl/src/ssl_cipher.erl b/lib/ssl/src/ssl_cipher.erl
index e1d89c149e..8584e56d6c 100644
--- a/lib/ssl/src/ssl_cipher.erl
+++ b/lib/ssl/src/ssl_cipher.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -33,11 +33,11 @@
-include_lib("public_key/include/public_key.hrl").
-export([security_parameters/2, security_parameters/3, suite_definition/1,
- cipher_init/3, decipher/5, cipher/5, decipher_aead/6, cipher_aead/6,
+ cipher_init/3, decipher/6, cipher/5, decipher_aead/6, cipher_aead/6,
suite/1, suites/1, all_suites/1,
ec_keyed_suites/0, anonymous_suites/1, psk_suites/1, srp_suites/0,
- openssl_suite/1, openssl_suite_name/1, filter/2, filter_suites/1,
- hash_algorithm/1, sign_algorithm/1, is_acceptable_hash/2]).
+ rc4_suites/1, openssl_suite/1, openssl_suite_name/1, filter/2, filter_suites/1,
+ hash_algorithm/1, sign_algorithm/1, is_acceptable_hash/2, is_fallback/1]).
-export_type([cipher_suite/0,
erl_cipher_suite/0, openssl_cipher_suite/0,
@@ -182,7 +182,8 @@ block_cipher(Fun, BlockSz, #cipher_state{key=Key, iv=IV} = CS0,
{T, CS0#cipher_state{iv=NextIV}}.
%%--------------------------------------------------------------------
--spec decipher(cipher_enum(), integer(), #cipher_state{}, binary(), ssl_record:ssl_version()) ->
+-spec decipher(cipher_enum(), integer(), #cipher_state{}, binary(),
+ ssl_record:ssl_version(), boolean()) ->
{binary(), binary(), #cipher_state{}} | #alert{}.
%%
%% Description: Decrypts the data and the MAC using cipher described
@@ -190,9 +191,9 @@ block_cipher(Fun, BlockSz, #cipher_state{key=Key, iv=IV} = CS0,
%% Used for "MAC then Cipher" suites where first the data is decrypted
%% and the an HMAC of the decrypted data is checked
%%-------------------------------------------------------------------
-decipher(?NULL, _HashSz, CipherState, Fragment, _) ->
+decipher(?NULL, _HashSz, CipherState, Fragment, _, _) ->
{Fragment, <<>>, CipherState};
-decipher(?RC4, HashSz, CipherState = #cipher_state{state = State0}, Fragment, _) ->
+decipher(?RC4, HashSz, CipherState = #cipher_state{state = State0}, Fragment, _, _) ->
try crypto:stream_decrypt(State0, Fragment) of
{State, Text} ->
GSC = generic_stream_cipher_from_bin(Text, HashSz),
@@ -208,20 +209,20 @@ decipher(?RC4, HashSz, CipherState = #cipher_state{state = State0}, Fragment, _)
?ALERT_REC(?FATAL, ?BAD_RECORD_MAC)
end;
-decipher(?DES, HashSz, CipherState, Fragment, Version) ->
+decipher(?DES, HashSz, CipherState, Fragment, Version, PaddingCheck) ->
block_decipher(fun(Key, IV, T) ->
crypto:block_decrypt(des_cbc, Key, IV, T)
- end, CipherState, HashSz, Fragment, Version);
-decipher(?'3DES', HashSz, CipherState, Fragment, Version) ->
+ end, CipherState, HashSz, Fragment, Version, PaddingCheck);
+decipher(?'3DES', HashSz, CipherState, Fragment, Version, PaddingCheck) ->
block_decipher(fun(<<K1:8/binary, K2:8/binary, K3:8/binary>>, IV, T) ->
crypto:block_decrypt(des3_cbc, [K1, K2, K3], IV, T)
- end, CipherState, HashSz, Fragment, Version);
-decipher(?AES_CBC, HashSz, CipherState, Fragment, Version) ->
+ end, CipherState, HashSz, Fragment, Version, PaddingCheck);
+decipher(?AES_CBC, HashSz, CipherState, Fragment, Version, PaddingCheck) ->
block_decipher(fun(Key, IV, T) when byte_size(Key) =:= 16 ->
crypto:block_decrypt(aes_cbc128, Key, IV, T);
(Key, IV, T) when byte_size(Key) =:= 32 ->
crypto:block_decrypt(aes_cbc256, Key, IV, T)
- end, CipherState, HashSz, Fragment, Version).
+ end, CipherState, HashSz, Fragment, Version, PaddingCheck).
%%--------------------------------------------------------------------
-spec decipher_aead(cipher_enum(), #cipher_state{}, integer(), binary(), binary(), ssl_record:ssl_version()) ->
@@ -237,7 +238,7 @@ decipher_aead(?CHACHA20_POLY1305, CipherState, SeqNo, AAD, Fragment, Version) ->
aead_decipher(chacha20_poly1305, CipherState, SeqNo, AAD, Fragment, Version).
block_decipher(Fun, #cipher_state{key=Key, iv=IV} = CipherState0,
- HashSz, Fragment, Version) ->
+ HashSz, Fragment, Version, PaddingCheck) ->
try
Text = Fun(Key, IV, Fragment),
NextIV = next_iv(Fragment, IV),
@@ -245,7 +246,7 @@ block_decipher(Fun, #cipher_state{key=Key, iv=IV} = CipherState0,
Content = GBC#generic_block_cipher.content,
Mac = GBC#generic_block_cipher.mac,
CipherState1 = CipherState0#cipher_state{iv=GBC#generic_block_cipher.next_iv},
- case is_correct_padding(GBC, Version) of
+ case is_correct_padding(GBC, Version, PaddingCheck) of
true ->
{Content, Mac, CipherState1};
false ->
@@ -306,9 +307,10 @@ suites({3, N}) ->
all_suites(Version) ->
suites(Version)
- ++ ssl_cipher:anonymous_suites(Version)
- ++ ssl_cipher:psk_suites(Version)
- ++ ssl_cipher:srp_suites().
+ ++ anonymous_suites(Version)
+ ++ psk_suites(Version)
+ ++ srp_suites()
+ ++ rc4_suites(Version).
%%--------------------------------------------------------------------
-spec anonymous_suites(ssl_record:ssl_version() | integer()) -> [cipher_suite()].
%%
@@ -394,6 +396,24 @@ srp_suites() ->
?TLS_SRP_SHA_WITH_AES_256_CBC_SHA,
?TLS_SRP_SHA_RSA_WITH_AES_256_CBC_SHA,
?TLS_SRP_SHA_DSS_WITH_AES_256_CBC_SHA].
+%%--------------------------------------------------------------------
+-spec rc4_suites(Version::ssl_record:ssl_version()) -> [cipher_suite()].
+%%
+%% Description: Returns a list of the RSA, ECDH/RSA and ECDH/ECDSA
+%% cipher suites using RC4; they are only supported if explicitly set by
+%% the user, as RC4 is no longer considered secure. Other RC4 suites
+%% already belonged to the user-configured-only category.
+%%--------------------------------------------------------------------
+rc4_suites({3, 0}) ->
+ [?TLS_RSA_WITH_RC4_128_SHA,
+ ?TLS_RSA_WITH_RC4_128_MD5];
+rc4_suites({3, N}) when N =< 3 ->
+ [?TLS_ECDHE_ECDSA_WITH_RC4_128_SHA,
+ ?TLS_ECDHE_RSA_WITH_RC4_128_SHA,
+ ?TLS_RSA_WITH_RC4_128_SHA,
+ ?TLS_RSA_WITH_RC4_128_MD5,
+ ?TLS_ECDH_ECDSA_WITH_RC4_128_SHA,
+ ?TLS_ECDH_RSA_WITH_RC4_128_SHA].
%%--------------------------------------------------------------------
-spec suite_definition(cipher_suite()) -> int_cipher_suite().
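
A hedged sketch tied to the rc4_suites/1 addition above: RC4 suites are excluded from the defaults, so a client that still needs one has to list it explicitly in the ciphers option (host and port are hypothetical).

    {ok, Socket} = ssl:connect("legacy.example.com", 443,
                               [{ciphers, [{rsa, rc4_128, sha}]}]).
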
@@ -1422,6 +1442,9 @@ is_acceptable_prf(default_prf, _) ->
is_acceptable_prf(Prf, Algos) ->
proplists:get_bool(Prf, Algos).
+is_fallback(CipherSuites)->
+ lists:member(?TLS_FALLBACK_SCSV, CipherSuites).
+
%%--------------------------------------------------------------------
%%% Internal functions
%%--------------------------------------------------------------------
@@ -1632,16 +1655,18 @@ generic_stream_cipher_from_bin(T, HashSz) ->
#generic_stream_cipher{content=Content,
mac=Mac}.
-%% For interoperability reasons we do not check the padding content in
-%% SSL 3.0 and TLS 1.0 as it is not strictly required and breaks
-%% interopability with for instance Google.
is_correct_padding(#generic_block_cipher{padding_length = Len,
- padding = Padding}, {3, N})
- when N == 0; N == 1 ->
- Len == byte_size(Padding);
-%% Padding must be check in TLS 1.1 and after
+ padding = Padding}, {3, 0}, _) ->
+ Len == byte_size(Padding); %% Only length check is done in SSL 3.0 spec
+%% For interoperability reasons it is possible to disable
+%% the padding check when using TLS 1.0, as it is not strictly required
+%% in the spec (only recommended), however this makes TLS 1.0 vulnerable to the POODLE attack
+%% so by default this clause will not match
+is_correct_padding(GenBlockCipher, {3, 1}, false) ->
+ is_correct_padding(GenBlockCipher, {3, 0}, false);
+%% Padding must be checked in TLS 1.1 and after
is_correct_padding(#generic_block_cipher{padding_length = Len,
- padding = Padding}, _) ->
+ padding = Padding}, _, _) ->
Len == byte_size(Padding) andalso
list_to_binary(lists:duplicate(Len, Len)) == Padding.
diff --git a/lib/ssl/src/ssl_cipher.hrl b/lib/ssl/src/ssl_cipher.hrl
index 448c2405aa..8689a3c68b 100644
--- a/lib/ssl/src/ssl_cipher.hrl
+++ b/lib/ssl/src/ssl_cipher.hrl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -356,6 +356,10 @@
%% hello extension data as they should.
-define(TLS_EMPTY_RENEGOTIATION_INFO_SCSV, <<?BYTE(16#00), ?BYTE(16#FF)>>).
+%% TLS Fallback Signaling Cipher Suite Value (SCSV) for Preventing Protocol
+%% Downgrade Attacks
+-define(TLS_FALLBACK_SCSV, <<?BYTE(16#56), ?BYTE(16#00)>>).
+
%%% PSK Cipher Suites RFC 4279
%% TLS_PSK_WITH_RC4_128_SHA = { 0x00, 0x8A };
diff --git a/lib/ssl/src/ssl_config.erl b/lib/ssl/src/ssl_config.erl
index 545b8aa0f6..fc8b214a29 100644
--- a/lib/ssl/src/ssl_config.erl
+++ b/lib/ssl/src/ssl_config.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -31,13 +31,13 @@ init(SslOpts, Role) ->
init_manager_name(SslOpts#ssl_options.erl_dist),
- {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, OwnCert}
+ {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbHandle, OwnCert}
= init_certificates(SslOpts, Role),
PrivateKey =
init_private_key(PemCacheHandle, SslOpts#ssl_options.key, SslOpts#ssl_options.keyfile,
SslOpts#ssl_options.password, Role),
DHParams = init_diffie_hellman(PemCacheHandle, SslOpts#ssl_options.dh, SslOpts#ssl_options.dhfile, Role),
- {ok, CertDbRef, CertDbHandle, FileRefHandle, CacheHandle, OwnCert, PrivateKey, DHParams}.
+ {ok, CertDbRef, CertDbHandle, FileRefHandle, CacheHandle, CRLDbHandle, OwnCert, PrivateKey, DHParams}.
init_manager_name(false) ->
put(ssl_manager, ssl_manager:manager_name(normal));
@@ -46,9 +46,11 @@ init_manager_name(true) ->
init_certificates(#ssl_options{cacerts = CaCerts,
cacertfile = CACertFile,
- certfile = CertFile,
- cert = Cert}, Role) ->
- {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle} =
+ certfile = CertFile,
+ cert = Cert,
+ crl_cache = CRLCache
+ }, Role) ->
+ {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbInfo} =
try
Certs = case CaCerts of
undefined ->
@@ -56,39 +58,40 @@ init_certificates(#ssl_options{cacerts = CaCerts,
_ ->
{der, CaCerts}
end,
- {ok, _, _, _, _, _} = ssl_manager:connection_init(Certs, Role)
+ {ok, _, _, _, _, _, _} = ssl_manager:connection_init(Certs, Role, CRLCache)
catch
_:Reason ->
file_error(CACertFile, {cacertfile, Reason})
end,
init_certificates(Cert, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle,
- CacheHandle, CertFile, Role).
+ CacheHandle, CRLDbInfo, CertFile, Role).
-init_certificates(undefined, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, <<>>, _) ->
- {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, undefined};
+init_certificates(undefined, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle,
+ CRLDbInfo, <<>>, _) ->
+ {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbInfo, undefined};
init_certificates(undefined, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle,
- CacheHandle, CertFile, client) ->
+ CacheHandle, CRLDbInfo, CertFile, client) ->
try
%% Ignoring potential proxy-certificates see:
%% http://dev.globus.org/wiki/Security/ProxyFileFormat
[OwnCert|_] = ssl_certificate:file_to_certificats(CertFile, PemCacheHandle),
- {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, OwnCert}
+ {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbInfo, OwnCert}
catch _Error:_Reason ->
- {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, undefined}
+ {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbInfo, undefined}
end;
init_certificates(undefined, CertDbRef, CertDbHandle, FileRefHandle,
- PemCacheHandle, CacheRef, CertFile, server) ->
+ PemCacheHandle, CacheRef, CRLDbInfo, CertFile, server) ->
try
[OwnCert|_] = ssl_certificate:file_to_certificats(CertFile, PemCacheHandle),
- {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheRef, OwnCert}
+ {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheRef, CRLDbInfo, OwnCert}
catch
_:Reason ->
file_error(CertFile, {certfile, Reason})
end;
-init_certificates(Cert, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheRef, _, _) ->
- {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheRef, Cert}.
+init_certificates(Cert, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheRef, CRLDbInfo, _, _) ->
+ {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheRef, CRLDbInfo, Cert}.
init_private_key(_, undefined, <<>>, _Password, _Client) ->
undefined;
diff --git a/lib/ssl/src/ssl_connection.erl b/lib/ssl/src/ssl_connection.erl
index b6059eac58..08d0145aa7 100644
--- a/lib/ssl/src/ssl_connection.erl
+++ b/lib/ssl/src/ssl_connection.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2013-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2013-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -411,11 +411,15 @@ certify(#certificate{} = Cert,
role = Role,
cert_db = CertDbHandle,
cert_db_ref = CertDbRef,
+ crl_db = CRLDbInfo,
ssl_options = Opts} = State, Connection) ->
- case ssl_handshake:certify(Cert, CertDbHandle, CertDbRef, Opts#ssl_options.depth,
+ case ssl_handshake:certify(Cert, CertDbHandle, CertDbRef,
+ Opts#ssl_options.depth,
Opts#ssl_options.verify,
Opts#ssl_options.verify_fun,
Opts#ssl_options.partial_chain,
+ Opts#ssl_options.crl_check,
+ CRLDbInfo,
Role) of
{PeerCert, PublicKeyInfo} ->
handle_peer_cert(Role, PeerCert, PublicKeyInfo,
@@ -964,7 +968,7 @@ format_status(terminate, [_, State]) ->
%%% Internal functions
%%--------------------------------------------------------------------
ssl_config(Opts, Role, State) ->
- {ok, Ref, CertDbHandle, FileRefHandle, CacheHandle, OwnCert, Key, DHParams} =
+ {ok, Ref, CertDbHandle, FileRefHandle, CacheHandle, CRLDbInfo, OwnCert, Key, DHParams} =
ssl_config:init(Opts, Role),
Handshake = ssl_handshake:init_handshake_history(),
TimeStamp = calendar:datetime_to_gregorian_seconds({date(), time()}),
@@ -975,6 +979,7 @@ ssl_config(Opts, Role, State) ->
file_ref_db = FileRefHandle,
cert_db_ref = Ref,
cert_db = CertDbHandle,
+ crl_db = CRLDbInfo,
session_cache = CacheHandle,
private_key = Key,
diffie_hellman_params = DHParams,
diff --git a/lib/ssl/src/ssl_connection.hrl b/lib/ssl/src/ssl_connection.hrl
index b9a1ef3a84..ac3b26e4bf 100644
--- a/lib/ssl/src/ssl_connection.hrl
+++ b/lib/ssl/src/ssl_connection.hrl
@@ -1,8 +1,7 @@
-
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2013-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2013-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -53,6 +52,7 @@
session :: #session{} | secret_printout(),
session_cache :: db_handle(),
session_cache_cb :: atom(),
+ crl_db :: term(),
negotiated_version :: ssl_record:ssl_version(),
client_certificate_requested = false :: boolean(),
key_algorithm :: ssl_cipher:key_algo(),
diff --git a/lib/ssl/src/ssl_crl.erl b/lib/ssl/src/ssl_crl.erl
new file mode 100644
index 0000000000..b8761f0601
--- /dev/null
+++ b/lib/ssl/src/ssl_crl.erl
@@ -0,0 +1,82 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2015-2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+
+%%----------------------------------------------------------------------
+%% Purpose: CRL handling
+%%----------------------------------------------------------------------
+
+-module(ssl_crl).
+
+-include("ssl_alert.hrl").
+-include("ssl_internal.hrl").
+-include_lib("public_key/include/public_key.hrl").
+
+-export([trusted_cert_and_path/3]).
+
+trusted_cert_and_path(CRL, {SerialNumber, Issuer},{Db, DbRef} = DbHandle) ->
+ case ssl_pkix_db:lookup_trusted_cert(Db, DbRef, SerialNumber, Issuer) of
+ undefined ->
+ trusted_cert_and_path(CRL, issuer_not_found, DbHandle);
+ {ok, {_, OtpCert}} ->
+ {ok, Root, Chain} = ssl_certificate:certificate_chain(OtpCert, Db, DbRef),
+ {ok, Root, lists:reverse(Chain)}
+ end;
+
+trusted_cert_and_path(CRL, issuer_not_found, {Db, DbRef} = DbHandle) ->
+ try find_issuer(CRL, DbHandle) of
+ OtpCert ->
+ {ok, Root, Chain} = ssl_certificate:certificate_chain(OtpCert, Db, DbRef),
+ {ok, Root, lists:reverse(Chain)}
+ catch
+ throw:_ ->
+ {error, issuer_not_found}
+ end.
+
+find_issuer(CRL, {Db,_}) ->
+ Issuer = public_key:pkix_normalize_name(public_key:pkix_crl_issuer(CRL)),
+ IsIssuerFun =
+ fun({_Key, {_Der,ErlCertCandidate}}, Acc) ->
+ verify_crl_issuer(CRL, ErlCertCandidate, Issuer, Acc);
+ (_, Acc) ->
+ Acc
+ end,
+
+ try ssl_pkix_db:foldl(IsIssuerFun, issuer_not_found, Db) of
+ issuer_not_found ->
+ {error, issuer_not_found}
+ catch
+ {ok, IssuerCert} ->
+ IssuerCert
+ end.
+
+
+verify_crl_issuer(CRL, ErlCertCandidate, Issuer, NotIssuer) ->
+ TBSCert = ErlCertCandidate#'OTPCertificate'.tbsCertificate,
+ case public_key:pkix_normalize_name(TBSCert#'OTPTBSCertificate'.subject) of
+ Issuer ->
+ case public_key:pkix_crl_verify(CRL, ErlCertCandidate) of
+ true ->
+ throw({ok, ErlCertCandidate});
+ false ->
+ NotIssuer;
+ _ ->
+ NotIssuer
+ end;
+ _ ->
+ NotIssuer
+ end.
diff --git a/lib/ssl/src/ssl_crl_cache.erl b/lib/ssl/src/ssl_crl_cache.erl
new file mode 100644
index 0000000000..b2bdb19979
--- /dev/null
+++ b/lib/ssl/src/ssl_crl_cache.erl
@@ -0,0 +1,179 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2015-2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+
+%%----------------------------------------------------------------------
+%% Purpose: Simple default CRL cache
+%%----------------------------------------------------------------------
+
+-module(ssl_crl_cache).
+
+-include("ssl_internal.hrl").
+-include_lib("public_key/include/public_key.hrl").
+
+-behaviour(ssl_crl_cache_api).
+
+-export([lookup/2, select/2, fresh_crl/2]).
+-export([insert/1, insert/2, delete/1]).
+
+%%====================================================================
+%% Cache callback API
+%%====================================================================
+
+lookup(#'DistributionPoint'{distributionPoint={fullName, Names}},
+ CRLDbInfo) ->
+ get_crls(Names, CRLDbInfo);
+lookup(_,_) ->
+ not_available.
+
+select(Issuer, {{_Cache, Mapping},_}) ->
+ case ssl_pkix_db:lookup(Issuer, Mapping) of
+ undefined ->
+ [];
+ CRLs ->
+ CRLs
+ end.
+
+fresh_crl(DistributionPoint, CRL) ->
+ case get_crls(DistributionPoint, undefined) of
+ not_available ->
+ CRL;
+ [NewCRL] ->
+ NewCRL
+ end.
+
+%%====================================================================
+%% API
+%%====================================================================
+
+insert(CRLs) ->
+ insert(?NO_DIST_POINT, CRLs).
+
+insert(URI, {file, File}) when is_list(URI) ->
+ case file:read_file(File) of
+ {ok, PemBin} ->
+ PemEntries = public_key:pem_decode(PemBin),
+ CRLs = [ CRL || {'CertificateList', CRL, not_encrypted}
+ <- PemEntries],
+ do_insert(URI, CRLs);
+ Error ->
+ Error
+ end;
+insert(URI, {der, CRLs}) ->
+ do_insert(URI, CRLs).
+
+delete({file, File}) ->
+ case file:read_file(File) of
+ {ok, PemBin} ->
+ PemEntries = public_key:pem_decode(PemBin),
+ CRLs = [ CRL || {'CertificateList', CRL, not_encrypted}
+ <- PemEntries],
+ ssl_manager:delete_crls({?NO_DIST_POINT, CRLs});
+ Error ->
+ Error
+ end;
+delete({der, CRLs}) ->
+ ssl_manager:delete_crls({?NO_DIST_POINT, CRLs});
+
+delete(URI) ->
+ case http_uri:parse(URI) of
+ {ok, {http, _, _ , _, Path,_}} ->
+ ssl_manager:delete_crls(string:strip(Path, left, $/));
+ _ ->
+ {error, {only_http_distribution_points_supported, URI}}
+ end.
+
+%%--------------------------------------------------------------------
+%%% Internal functions
+%%--------------------------------------------------------------------
+do_insert(URI, CRLs) ->
+ case http_uri:parse(URI) of
+ {ok, {http, _, _ , _, Path,_}} ->
+ ssl_manager:insert_crls(string:strip(Path, left, $/), CRLs);
+ _ ->
+ {error, {only_http_distribution_points_supported, URI}}
+ end.
+
+get_crls([], _) ->
+ not_available;
+get_crls([{uniformResourceIdentifier, "http"++_ = URL} | Rest],
+ CRLDbInfo) ->
+ case cache_lookup(URL, CRLDbInfo) of
+ [] ->
+ handle_http(URL, Rest, CRLDbInfo);
+ CRLs ->
+ CRLs
+ end;
+get_crls([ _| Rest], CRLDbInfo) ->
+ %% unsupported CRL location
+ get_crls(Rest, CRLDbInfo).
+
+http_lookup(URL, Rest, CRLDbInfo, Timeout) ->
+ case application:ensure_started(inets) of
+ ok ->
+ http_get(URL, Rest, CRLDbInfo, Timeout);
+ _ ->
+ get_crls(Rest, CRLDbInfo)
+ end.
+
+http_get(URL, Rest, CRLDbInfo, Timeout) ->
+ case httpc:request(get, {URL, [{"connection", "close"}]},
+ [{timeout, Timeout}], [{body_format, binary}]) of
+ {ok, {_Status, _Headers, Body}} ->
+ case Body of
+ <<"-----BEGIN", _/binary>> ->
+ Pem = public_key:pem_decode(Body),
+ lists:filtermap(fun({'CertificateList',
+ CRL, not_encrypted}) ->
+ {true, CRL};
+ (_) ->
+ false
+ end, Pem);
+ _ ->
+ try public_key:der_decode('CertificateList', Body) of
+ _ ->
+ [Body]
+ catch
+ _:_ ->
+ get_crls(Rest, CRLDbInfo)
+ end
+ end;
+ {error, _Reason} ->
+ get_crls(Rest, CRLDbInfo)
+ end.
+
+cache_lookup(_, undefined) ->
+ [];
+cache_lookup(URL, {{Cache, _}, _}) ->
+ {ok, {_, _, _ , _, Path,_}} = http_uri:parse(URL),
+ case ssl_pkix_db:lookup(string:strip(Path, left, $/), Cache) of
+ undefined ->
+ [];
+ CRLs ->
+ CRLs
+ end.
+
+handle_http(URI, Rest, {_, [{http, Timeout}]} = CRLDbInfo) ->
+ CRLs = http_lookup(URI, Rest, CRLDbInfo, Timeout),
+ %% Uncomment to improve performance, but a cache size limit and/or
+ %% periodic cleaning must be implemented first to prevent
+ %% DoS attack possibilities
+ %%insert(URI, {der, CRLs}),
+ CRLs;
+handle_http(_, Rest, CRLDbInfo) ->
+ get_crls(Rest, CRLDbInfo).
+
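
A short usage sketch for the default cache above; the URI and file name are
placeholders, and insert/2 and delete/1 only accept HTTP distribution-point URIs:

    %% Pre-load a CRL fetched out of band, keyed on its distribution point.
    ok = ssl_crl_cache:insert("http://crl.example.com/ca.crl",
                              {file, "ca.crl"}),
    %% Remove it again when it is no longer wanted.
    ok = ssl_crl_cache:delete("http://crl.example.com/ca.crl").
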
diff --git a/lib/ssl/src/ssl_crl_cache_api.erl b/lib/ssl/src/ssl_crl_cache_api.erl
new file mode 100644
index 0000000000..0915ba12e5
--- /dev/null
+++ b/lib/ssl/src/ssl_crl_cache_api.erl
@@ -0,0 +1,30 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2015-2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+
+-module(ssl_crl_cache_api).
+
+-include_lib("public_key/include/public_key.hrl").
+
+-type db_handle() :: term().
+
+-callback lookup(#'DistributionPoint'{}, db_handle()) -> not_available | [public_key:der_encode()].
+-callback select(term(), db_handle()) -> [public_key:der_encode()].
+-callback fresh_crl(#'DistributionPoint'{}, public_key:der_encode()) -> public_key:der_encode().
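
For illustration, a skeletal callback module satisfying the behaviour above; the
module name and function bodies are hypothetical and only show the expected return
shapes:

    -module(my_crl_cache).
    -behaviour(ssl_crl_cache_api).
    -include_lib("public_key/include/public_key.hrl").
    -export([lookup/2, select/2, fresh_crl/2]).

    %% Return the DER CRLs published at the distribution point, or not_available.
    lookup(#'DistributionPoint'{}, _DbHandle) -> not_available.

    %% Return all DER CRLs issued by Issuer that the cache knows about.
    select(_Issuer, _DbHandle) -> [].

    %% Return a possibly refreshed DER CRL for the distribution point.
    fresh_crl(_DistributionPoint, CurrentCRL) -> CurrentCRL.
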
diff --git a/lib/ssl/src/ssl_handshake.erl b/lib/ssl/src/ssl_handshake.erl
index 07535e79b4..6cab8eb7a1 100644
--- a/lib/ssl/src/ssl_handshake.erl
+++ b/lib/ssl/src/ssl_handshake.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2013-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2013-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -49,7 +49,7 @@
finished/5, next_protocol/1]).
%% Handle handshake messages
--export([certify/8, client_certificate_verify/6, certificate_verify/6, verify_signature/5,
+-export([certify/10, client_certificate_verify/6, certificate_verify/6, verify_signature/5,
master_secret/5, server_key_exchange_hash/2, verify_connection/6,
init_handshake_history/0, update_handshake_history/2, verify_server_key/5
]).
@@ -149,7 +149,7 @@ client_hello_extensions(Host, Version, CipherSuites, SslOpts, ConnectionStates,
certificate(OwnCert, CertDbHandle, CertDbRef, client) ->
Chain =
case ssl_certificate:certificate_chain(OwnCert, CertDbHandle, CertDbRef) of
- {ok, CertChain} ->
+ {ok, _, CertChain} ->
CertChain;
{error, _} ->
%% If no suitable certificate is available, the client
@@ -161,7 +161,7 @@ certificate(OwnCert, CertDbHandle, CertDbRef, client) ->
certificate(OwnCert, CertDbHandle, CertDbRef, server) ->
case ssl_certificate:certificate_chain(OwnCert, CertDbHandle, CertDbRef) of
- {ok, Chain} ->
+ {ok, _, Chain} ->
#certificate{asn1_certificates = Chain};
{error, _} ->
?ALERT_REC(?FATAL, ?INTERNAL_ERROR)
@@ -383,49 +383,24 @@ verify_signature(_Version, Hash, {HashAlgo, ecdsa}, Signature,
%%--------------------------------------------------------------------
-spec certify(#certificate{}, db_handle(), certdb_ref(), integer() | nolimit,
- verify_peer | verify_none, {fun(), term}, fun(),
+ verify_peer | verify_none, {fun(), term}, fun(), term(), term(),
client | server) -> {der_cert(), public_key_info()} | #alert{}.
%%
%% Description: Handles a certificate handshake message
%%--------------------------------------------------------------------
certify(#certificate{asn1_certificates = ASN1Certs}, CertDbHandle, CertDbRef,
- MaxPathLen, _Verify, VerifyFunAndState, PartialChain, Role) ->
+ MaxPathLen, _Verify, ValidationFunAndState0, PartialChain, CRLCheck, CRLDbHandle, Role) ->
[PeerCert | _] = ASN1Certs,
-
- ValidationFunAndState =
- case VerifyFunAndState of
- undefined ->
- {fun(OtpCert, ExtensionOrVerifyResult, SslState) ->
- ssl_certificate:validate_extension(OtpCert,
- ExtensionOrVerifyResult, SslState)
- end, Role};
- {Fun, UserState0} ->
- {fun(OtpCert, {extension, _} = Extension, {SslState, UserState}) ->
- case ssl_certificate:validate_extension(OtpCert,
- Extension,
- SslState) of
- {valid, NewSslState} ->
- {valid, {NewSslState, UserState}};
- {fail, Reason} ->
- apply_user_fun(Fun, OtpCert, Reason, UserState,
- SslState);
- {unknown, _} ->
- apply_user_fun(Fun, OtpCert,
- Extension, UserState, SslState)
- end;
- (OtpCert, VerifyResult, {SslState, UserState}) ->
- apply_user_fun(Fun, OtpCert, VerifyResult, UserState,
- SslState)
- end, {Role, UserState0}}
- end,
+
+ ValidationFunAndState = validation_fun_and_state(ValidationFunAndState0, Role,
+ CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle),
try
- {TrustedErlCert, CertPath} =
+ {TrustedCert, CertPath} =
ssl_certificate:trusted_cert_and_path(ASN1Certs, CertDbHandle, CertDbRef, PartialChain),
- case public_key:pkix_path_validation(TrustedErlCert,
- CertPath,
- [{max_path_length,
- MaxPathLen},
+ case public_key:pkix_path_validation(TrustedCert,
+ CertPath,
+ [{max_path_length, MaxPathLen},
{verify_fun, ValidationFunAndState}]) of
{ok, {PublicKeyInfo,_}} ->
{PeerCert, PublicKeyInfo};
@@ -1374,15 +1349,66 @@ sni1(Hostname) ->
%%--------------------------------------------------------------------
%%% Internal functions
%%--------------------------------------------------------------------
+validation_fun_and_state({Fun, UserState0}, Role, CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle) ->
+ {fun(OtpCert, {extension, _} = Extension, {SslState, UserState}) ->
+ case ssl_certificate:validate(OtpCert,
+ Extension,
+ SslState) of
+ {valid, NewSslState} ->
+ {valid, {NewSslState, UserState}};
+ {fail, Reason} ->
+ apply_user_fun(Fun, OtpCert, Reason, UserState,
+ SslState);
+ {unknown, _} ->
+ apply_user_fun(Fun, OtpCert,
+ Extension, UserState, SslState)
+ end;
+ (OtpCert, VerifyResult, {SslState, UserState}) ->
+ apply_user_fun(Fun, OtpCert, VerifyResult, UserState,
+ SslState)
+ end, {{Role, CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle}, UserState0}};
+validation_fun_and_state(undefined, Role, CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle) ->
+ {fun(OtpCert, {extension, _} = Extension, SslState) ->
+ ssl_certificate:validate(OtpCert,
+ Extension,
+ SslState);
+ (OtpCert, VerifyResult, SslState) when (VerifyResult == valid) or (VerifyResult == valid_peer) ->
+ case crl_check(OtpCert, CRLCheck, CertDbHandle, CertDbRef, CRLDbHandle, VerifyResult) of
+ valid ->
+ {VerifyResult, SslState};
+ Reason ->
+ {fail, Reason}
+ end;
+ (OtpCert, VerifyResult, SslState) ->
+ ssl_certificate:validate(OtpCert,
+ VerifyResult,
+ SslState)
+ end, {Role, CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle}}.
+
+apply_user_fun(Fun, OtpCert, VerifyResult, UserState0,
+ {_, CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle} = SslState) when
+ (VerifyResult == valid) or (VerifyResult == valid_peer) ->
+ case Fun(OtpCert, VerifyResult, UserState0) of
+ {Valid, UserState} when (Valid == valid) or (Valid == valid_peer) ->
+ case crl_check(OtpCert, CRLCheck, CertDbHandle, CertDbRef, CRLDbHandle, VerifyResult) of
+ valid ->
+ {Valid, {SslState, UserState}};
+ Result ->
+ apply_user_fun(Fun, OtpCert, Result, UserState, SslState)
+ end;
+ {fail, _} = Fail ->
+ Fail
+ end;
apply_user_fun(Fun, OtpCert, ExtensionOrError, UserState0, SslState) ->
case Fun(OtpCert, ExtensionOrError, UserState0) of
- {valid, UserState} ->
- {valid, {SslState, UserState}};
+ {Valid, UserState} when (Valid == valid) or (Valid == valid_peer)->
+ {Valid, {SslState, UserState}};
{fail, _} = Fail ->
Fail;
{unknown, UserState} ->
{unknown, {SslState, UserState}}
end.
+
path_validation_alert({bad_cert, cert_expired}) ->
?ALERT_REC(?FATAL, ?CERTIFICATE_EXPIRED);
path_validation_alert({bad_cert, invalid_issuer}) ->
@@ -1393,8 +1419,10 @@ path_validation_alert({bad_cert, name_not_permitted}) ->
?ALERT_REC(?FATAL, ?BAD_CERTIFICATE);
path_validation_alert({bad_cert, unknown_critical_extension}) ->
?ALERT_REC(?FATAL, ?UNSUPPORTED_CERTIFICATE);
-path_validation_alert({bad_cert, cert_revoked}) ->
+path_validation_alert({bad_cert, {revoked, _}}) ->
?ALERT_REC(?FATAL, ?CERTIFICATE_REVOKED);
+path_validation_alert({bad_cert, revocation_status_undetermined}) ->
+ ?ALERT_REC(?FATAL, ?BAD_CERTIFICATE);
path_validation_alert({bad_cert, selfsigned_peer}) ->
?ALERT_REC(?FATAL, ?BAD_CERTIFICATE);
path_validation_alert({bad_cert, unknown_ca}) ->
@@ -1954,3 +1982,70 @@ handle_psk_identity(_PSKIdentity, LookupFun)
error;
handle_psk_identity(PSKIdentity, {Fun, UserState}) ->
Fun(psk, PSKIdentity, UserState).
+
+crl_check(_, false, _,_,_, _) ->
+ valid;
+crl_check(_, peer, _, _,_, valid) -> %% Do not check CAs with this option.
+ valid;
+crl_check(OtpCert, Check, CertDbHandle, CertDbRef, {Callback, CRLDbHandle}, _) ->
+ Options = [{issuer_fun, {fun(_DP, CRL, Issuer, DBInfo) ->
+ ssl_crl:trusted_cert_and_path(CRL, Issuer, DBInfo)
+ end, {CertDbHandle, CertDbRef}}},
+ {update_crl, fun(DP, CRL) -> Callback:fresh_crl(DP, CRL) end}
+ ],
+ case dps_and_crls(OtpCert, Callback, CRLDbHandle, ext) of
+ no_dps ->
+ case dps_and_crls(OtpCert, Callback, CRLDbHandle, same_issuer) of
+ [] ->
+ valid; %% No relevant CRL existed
+ Dps ->
+ crl_check_same_issuer(OtpCert, Check, Dps, Options)
+ end;
+ Dps -> %% This DP list may be empty if relevant CRLs existed
+ %% but could not be retrieved, which will result in {bad_cert, revocation_status_undetermined}
+ case public_key:pkix_crls_validate(OtpCert, Dps, Options) of
+ {bad_cert, revocation_status_undetermined} ->
+ crl_check_same_issuer(OtpCert, Check, dps_and_crls(OtpCert, Callback,
+ CRLDbHandle, same_issuer), Options);
+ Other ->
+ Other
+ end
+ end.
+
+crl_check_same_issuer(OtpCert, best_effort, Dps, Options) ->
+ case public_key:pkix_crls_validate(OtpCert, Dps, Options) of
+ {bad_cert, revocation_status_undetermined} ->
+ valid;
+ Other ->
+ Other
+ end;
+crl_check_same_issuer(OtpCert, _, Dps, Options) ->
+ public_key:pkix_crls_validate(OtpCert, Dps, Options).
+
+dps_and_crls(OtpCert, Callback, CRLDbHandle, ext) ->
+ case public_key:pkix_dist_points(OtpCert) of
+ [] ->
+ no_dps;
+ DistPoints ->
+ distpoints_lookup(DistPoints, Callback, CRLDbHandle)
+ end;
+
+dps_and_crls(OtpCert, Callback, CRLDbHandle, same_issuer) ->
+ DP = #'DistributionPoint'{distributionPoint = {fullName, GenNames}} =
+ public_key:pkix_dist_point(OtpCert),
+ CRLs = lists:flatmap(fun({directoryName, Issuer}) ->
+ Callback:select(Issuer, CRLDbHandle);
+ (_) ->
+ []
+ end, GenNames),
+ [{DP, {CRL, public_key:der_decode('CertificateList', CRL)}} || CRL <- CRLs].
+
+distpoints_lookup([], _, _) ->
+ [];
+distpoints_lookup([DistPoint | Rest], Callback, CRLDbHandle) ->
+ case Callback:lookup(DistPoint, CRLDbHandle) of
+ not_available ->
+ distpoints_lookup(Rest, Callback, CRLDbHandle);
+ CRLs ->
+ [{DistPoint, {CRL, public_key:der_decode('CertificateList', CRL)}} || CRL <- CRLs]
+ end.
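
The crl_check/6 clauses above handle false, peer and (via crl_check_same_issuer/4)
best_effort; a hedged client-side sketch, assuming the options are exposed through
ssl:connect/3 and that {internal, [{http, Timeout}]} is the handle format consumed
by ssl_crl_cache (as suggested by crl_db_info/2 and handle_http/3); host and file
names are placeholders:

    %% Check revocation of the peer certificate only, fetching CRLs over
    %% HTTP with a 5 second timeout.
    {ok, Socket} = ssl:connect("server.example", 443,
                               [{verify, verify_peer},
                                {cacertfile, "cacerts.pem"},
                                {crl_check, peer},
                                {crl_cache, {ssl_crl_cache,
                                             {internal, [{http, 5000}]}}}]).
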
diff --git a/lib/ssl/src/ssl_internal.hrl b/lib/ssl/src/ssl_internal.hrl
index 75efb64e3f..8df79f9e8c 100644
--- a/lib/ssl/src/ssl_internal.hrl
+++ b/lib/ssl/src/ssl_internal.hrl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -61,14 +61,19 @@
-define(CDR_HDR_SIZE, 12).
-define(DEFAULT_TIMEOUT, 5000).
+-define(NO_DIST_POINT, "http://dummy/no_distribution_point").
+-define(NO_DIST_POINT_PATH, "dummy/no_distribution_point").
%% Common enumerate values in for SSL-protocols
-define(NULL, 0).
-define(TRUE, 0).
-define(FALSE, 1).
--define(ALL_SUPPORTED_VERSIONS, ['tlsv1.2', 'tlsv1.1', tlsv1, sslv3]).
--define(MIN_SUPPORTED_VERSIONS, ['tlsv1.1', tlsv1, sslv3]).
+%% sslv3 is considered insecure due to the lack of a padding check (Poodle attack).
+%% Keep it available for interoperability with legacy software, but do not support it by default.
+-define(ALL_AVAILABLE_VERSIONS, ['tlsv1.2', 'tlsv1.1', tlsv1, sslv3]).
+-define(ALL_SUPPORTED_VERSIONS, ['tlsv1.2', 'tlsv1.1', tlsv1]).
+-define(MIN_SUPPORTED_VERSIONS, ['tlsv1.1', tlsv1]).
-define(ALL_DATAGRAM_SUPPORTED_VERSIONS, ['dtlsv1.2', dtlsv1]).
-define(MIN_DATAGRAM_SUPPORTED_VERSIONS, ['dtlsv1.2', dtlsv1]).
@@ -117,7 +122,11 @@
server_name_indication = undefined,
%% Should the server prefer its own cipher order over the one provided by
%% the client?
- honor_cipher_order = false
+ honor_cipher_order = false,
+ padding_check = true,
+ fallback = false,
+ crl_check,
+ crl_cache
}).
-record(socket_options,
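
Since sslv3 is dropped from ?ALL_SUPPORTED_VERSIONS but kept in
?ALL_AVAILABLE_VERSIONS, a connection that still needs it has to ask for it
explicitly; a minimal sketch with a placeholder host:

    %% sslv3 is no longer negotiated by default; opt in per connection only
    %% if a legacy peer requires it (not recommended).
    {ok, Socket} = ssl:connect("legacy.example", 443, [{versions, [sslv3]}]).
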
diff --git a/lib/ssl/src/ssl_manager.erl b/lib/ssl/src/ssl_manager.erl
index 5553fc9220..9c4b2a8bad 100644
--- a/lib/ssl/src/ssl_manager.erl
+++ b/lib/ssl/src/ssl_manager.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -26,14 +26,15 @@
%% Internal application API
-export([start_link/1, start_link_dist/1,
- connection_init/2, cache_pem_file/2,
+ connection_init/3, cache_pem_file/2,
lookup_trusted_cert/4,
new_session_id/1, clean_cert_db/2,
register_session/2, register_session/3, invalidate_session/2,
- invalidate_session/3, clear_pem_cache/0, manager_name/1]).
+ insert_crls/2, insert_crls/3, delete_crls/1, delete_crls/2,
+ invalidate_session/3, invalidate_pem/1, clear_pem_cache/0, manager_name/1]).
% Spawn export
--export([init_session_validator/1]).
+-export([init_session_validator/1, init_pem_cache_validator/1]).
%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
@@ -50,7 +51,9 @@
session_lifetime,
certificate_db,
session_validation_timer,
- last_delay_timer = {undefined, undefined}%% Keep for testing purposes
+ last_delay_timer = {undefined, undefined},%% Keep for testing purposes
+ last_pem_check,
+ clear_pem_cache
}).
-define('24H_in_msec', 86400000).
@@ -98,19 +101,19 @@ start_link_dist(Opts) ->
gen_server:start_link({local, DistMangerName}, ?MODULE, [DistMangerName, Opts], []).
%%--------------------------------------------------------------------
--spec connection_init(binary()| {der, list()}, client | server) ->
+-spec connection_init(binary()| {der, list()}, client | server, {Cb :: atom(), Handle:: term()}) ->
{ok, certdb_ref(), db_handle(), db_handle(), db_handle(), db_handle()}.
%%
%% Description: Do necessary initializations for a new connection.
%%--------------------------------------------------------------------
-connection_init({der, _} = Trustedcerts, Role) ->
- call({connection_init, Trustedcerts, Role});
+connection_init({der, _} = Trustedcerts, Role, CRLCache) ->
+ call({connection_init, Trustedcerts, Role, CRLCache});
-connection_init(<<>> = Trustedcerts, Role) ->
- call({connection_init, Trustedcerts, Role});
+connection_init(<<>> = Trustedcerts, Role, CRLCache) ->
+ call({connection_init, Trustedcerts, Role, CRLCache});
-connection_init(Trustedcerts, Role) ->
- call({connection_init, Trustedcerts, Role}).
+connection_init(Trustedcerts, Role, CRLCache) ->
+ call({connection_init, Trustedcerts, Role, CRLCache}).
%%--------------------------------------------------------------------
-spec cache_pem_file(binary(), term()) -> {ok, term()} | {error, reason()}.
@@ -118,14 +121,13 @@ connection_init(Trustedcerts, Role) ->
%% Description: Cache a pem file and return its content.
%%--------------------------------------------------------------------
cache_pem_file(File, DbHandle) ->
- MD5 = crypto:hash(md5, File),
- case ssl_pkix_db:lookup_cached_pem(DbHandle, MD5) of
+ case ssl_pkix_db:lookup_cached_pem(DbHandle, File) of
[{Content,_}] ->
{ok, Content};
[Content] ->
- {ok, Content};
+ {ok, Content};
undefined ->
- call({cache_pem, {MD5, File}})
+ call({cache_pem, File})
end.
%%--------------------------------------------------------------------
@@ -192,6 +194,28 @@ invalidate_session(Host, Port, Session) ->
invalidate_session(Port, Session) ->
cast({invalidate_session, Port, Session}).
+-spec invalidate_pem(File::binary()) -> ok.
+invalidate_pem(File) ->
+ cast({invalidate_pem, File}).
+
+insert_crls(Path, CRLs)->
+ insert_crls(Path, CRLs, normal).
+insert_crls(?NO_DIST_POINT_PATH = Path, CRLs, ManagerType)->
+ put(ssl_manager, manager_name(ManagerType)),
+ cast({insert_crls, Path, CRLs});
+insert_crls(Path, CRLs, ManagerType)->
+ put(ssl_manager, manager_name(ManagerType)),
+ call({insert_crls, Path, CRLs}).
+
+delete_crls(Path)->
+ delete_crls(Path, normal).
+delete_crls(?NO_DIST_POINT_PATH = Path, ManagerType)->
+ put(ssl_manager, manager_name(ManagerType)),
+ cast({delete_crls, Path});
+delete_crls(Path, ManagerType)->
+ put(ssl_manager, manager_name(ManagerType)),
+ call({delete_crls, Path}).
+
%%====================================================================
%% gen_server callbacks
%%====================================================================
@@ -216,13 +240,17 @@ init([Name, Opts]) ->
proplists:get_value(session_cb_init_args, Opts, [])]),
Timer = erlang:send_after(SessionLifeTime * 1000 + 5000,
self(), validate_sessions),
- erlang:send_after(?CLEAR_PEM_CACHE, self(), clear_pem_cache),
+ Interval = pem_check_interval(),
+ erlang:send_after(Interval, self(), clear_pem_cache),
{ok, #state{certificate_db = CertDb,
session_cache_client = ClientSessionCache,
session_cache_server = ServerSessionCache,
session_cache_cb = CacheCb,
session_lifetime = SessionLifeTime,
- session_validation_timer = Timer}}.
+ session_validation_timer = Timer,
+ last_pem_check = os:timestamp(),
+ clear_pem_cache = Interval
+ }}.
%%--------------------------------------------------------------------
-spec handle_call(msg(), from(), #state{}) -> {reply, reply(), #state{}}.
@@ -235,51 +263,39 @@ init([Name, Opts]) ->
%%
%% Description: Handling call messages
%%--------------------------------------------------------------------
-handle_call({{connection_init, <<>>, client}, _Pid}, _From,
- #state{certificate_db = [CertDb, FileRefDb, PemChace],
- session_cache_client = Cache} = State) ->
- Result = {ok, make_ref(),CertDb, FileRefDb, PemChace, Cache},
- {reply, Result, State};
-handle_call({{connection_init, <<>>, server}, _Pid}, _From,
- #state{certificate_db = [CertDb, FileRefDb, PemChace],
- session_cache_server = Cache} = State) ->
- Result = {ok, make_ref(),CertDb, FileRefDb, PemChace, Cache},
- {reply, Result, State};
-
-handle_call({{connection_init, Trustedcerts, client}, Pid}, _From,
- #state{certificate_db = [CertDb, FileRefDb, PemChace] = Db,
- session_cache_client = Cache} = State) ->
- Result =
- try
- {ok, Ref} = ssl_pkix_db:add_trusted_certs(Pid, Trustedcerts, Db),
- {ok, Ref, CertDb, FileRefDb, PemChace, Cache}
- catch
- _:Reason ->
- {error, Reason}
- end,
- {reply, Result, State};
-handle_call({{connection_init, Trustedcerts, server}, Pid}, _From,
- #state{certificate_db = [CertDb, FileRefDb, PemChace] = Db,
- session_cache_server = Cache} = State) ->
- Result =
- try
- {ok, Ref} = ssl_pkix_db:add_trusted_certs(Pid, Trustedcerts, Db),
- {ok, Ref, CertDb, FileRefDb, PemChace, Cache}
- catch
- _:Reason ->
- {error, Reason}
- end,
- {reply, Result, State};
-
-
-handle_call({{new_session_id,Port}, _},
+handle_call({{connection_init, <<>>, Role, {CRLCb, UserCRLDb}}, _Pid}, _From,
+ #state{certificate_db = [CertDb, FileRefDb, PemChace | _] = Db} = State) ->
+ Ref = make_ref(),
+ Result = {ok, Ref, CertDb, FileRefDb, PemChace, session_cache(Role, State), {CRLCb, crl_db_info(Db, UserCRLDb)}},
+ {reply, Result, State#state{certificate_db = Db}};
+
+handle_call({{connection_init, Trustedcerts, Role, {CRLCb, UserCRLDb}}, Pid}, _From,
+ #state{certificate_db = [CertDb, FileRefDb, PemChace | _] = Db} = State) ->
+ case add_trusted_certs(Pid, Trustedcerts, Db) of
+ {ok, Ref} ->
+ {reply, {ok, Ref, CertDb, FileRefDb, PemChace, session_cache(Role, State),
+ {CRLCb, crl_db_info(Db, UserCRLDb)}}, State};
+ {error, _} = Error ->
+ {reply, Error, State}
+ end;
+
+handle_call({{insert_crls, Path, CRLs}, _}, _From,
+ #state{certificate_db = Db} = State) ->
+ ssl_pkix_db:add_crls(Db, Path, CRLs),
+ {reply, ok, State};
+
+handle_call({{delete_crls, CRLsOrPath}, _}, _From,
+ #state{certificate_db = Db} = State) ->
+ ssl_pkix_db:remove_crls(Db, CRLsOrPath),
+ {reply, ok, State};
+
+handle_call({{new_session_id, Port}, _},
_, #state{session_cache_cb = CacheCb,
session_cache_server = Cache} = State) ->
Id = new_id(Port, ?GEN_UNIQUE_ID_MAX_TRIES, Cache, CacheCb),
{reply, Id, State};
-
-handle_call({{cache_pem, File}, _Pid}, _,
+handle_call({{cache_pem,File}, _Pid}, _,
#state{certificate_db = Db} = State) ->
try ssl_pkix_db:cache_pem_file(File, Db) of
Result ->
@@ -288,7 +304,7 @@ handle_call({{cache_pem, File}, _Pid}, _,
_:Reason ->
{reply, {error, Reason}, State}
end;
-handle_call({unconditionally_clear_pem_cache, _},_, #state{certificate_db = [_,_,PemChace]} = State) ->
+handle_call({unconditionally_clear_pem_cache, _},_, #state{certificate_db = [_,_,PemChace | _]} = State) ->
ssl_pkix_db:clear(PemChace),
{reply, ok, State}.
@@ -332,7 +348,23 @@ handle_cast({invalidate_session, Host, Port,
handle_cast({invalidate_session, Port, #session{session_id = ID} = Session},
#state{session_cache_server = Cache,
session_cache_cb = CacheCb} = State) ->
- invalidate_session(Cache, CacheCb, {Port, ID}, Session, State).
+ invalidate_session(Cache, CacheCb, {Port, ID}, Session, State);
+
+
+handle_cast({insert_crls, Path, CRLs},
+ #state{certificate_db = Db} = State) ->
+ ssl_pkix_db:add_crls(Db, Path, CRLs),
+ {noreply, State};
+
+handle_cast({delete_crls, CRLsOrPath},
+ #state{certificate_db = Db} = State) ->
+ ssl_pkix_db:remove_crls(Db, CRLsOrPath),
+ {noreply, State};
+
+handle_cast({invalidate_pem, File},
+ #state{certificate_db = [_, _, PemCache | _]} = State) ->
+ ssl_pkix_db:remove(File, PemCache),
+ {noreply, State}.
%%--------------------------------------------------------------------
-spec handle_info(msg(), #state{}) -> {noreply, #state{}}.
@@ -353,24 +385,22 @@ handle_info(validate_sessions, #state{session_cache_cb = CacheCb,
start_session_validator(ServerCache, CacheCb, LifeTime),
{noreply, State#state{session_validation_timer = Timer}};
+
handle_info({delayed_clean_session, Key, Cache}, #state{session_cache_cb = CacheCb
} = State) ->
CacheCb:delete(Cache, Key),
{noreply, State};
-handle_info(clear_pem_cache, #state{certificate_db = [_,_,PemChace]} = State) ->
- case ssl_pkix_db:db_size(PemChace) of
- N when N < ?NOT_TO_BIG ->
- ok;
- _ ->
- ssl_pkix_db:clear(PemChace)
- end,
- erlang:send_after(?CLEAR_PEM_CACHE, self(), clear_pem_cache),
- {noreply, State};
-
+handle_info(clear_pem_cache, #state{certificate_db = [_,_,PemChace | _],
+ clear_pem_cache = Interval,
+ last_pem_check = CheckPoint} = State) ->
+ NewCheckPoint = os:timestamp(),
+ start_pem_cache_validator(PemChace, CheckPoint),
+ erlang:send_after(Interval, self(), clear_pem_cache),
+ {noreply, State#state{last_pem_check = NewCheckPoint}};
handle_info({clean_cert_db, Ref, File},
- #state{certificate_db = [CertDb,RefDb, PemCache]} = State) ->
+ #state{certificate_db = [CertDb,RefDb, PemCache | _]} = State) ->
case ssl_pkix_db:lookup(Ref, RefDb) of
undefined -> %% Alredy cleaned
@@ -514,10 +544,9 @@ new_id(Port, Tries, Cache, CacheCb) ->
clean_cert_db(Ref, CertDb, RefDb, PemCache, File) ->
case ssl_pkix_db:ref_count(Ref, RefDb, 0) of
0 ->
- MD5 = crypto:hash(md5, File),
- case ssl_pkix_db:lookup_cached_pem(PemCache, MD5) of
+ case ssl_pkix_db:lookup_cached_pem(PemCache, File) of
[{Content, Ref}] ->
- ssl_pkix_db:insert(MD5, Content, PemCache);
+ ssl_pkix_db:insert(File, Content, PemCache);
_ ->
ok
end,
@@ -557,3 +586,57 @@ exists_equivalent(#session{
true;
exists_equivalent(Session, [ _ | Rest]) ->
exists_equivalent(Session, Rest).
+
+start_pem_cache_validator(PemCache, CheckPoint) ->
+ spawn_link(?MODULE, init_pem_cache_validator,
+ [[get(ssl_manager), PemCache, CheckPoint]]).
+
+init_pem_cache_validator([SslManagerName, PemCache, CheckPoint]) ->
+ put(ssl_manager, SslManagerName),
+ ssl_pkix_db:foldl(fun pem_cache_validate/2,
+ CheckPoint, PemCache).
+
+pem_cache_validate({File, _}, CheckPoint) ->
+ case file:read_file_info(File, []) of
+ {ok, #file_info{mtime = Time}} ->
+ case is_before_checkpoint(Time, CheckPoint) of
+ true ->
+ ok;
+ false ->
+ invalidate_pem(File)
+ end;
+ _ ->
+ invalidate_pem(File)
+ end,
+ CheckPoint.
+
+pem_check_interval() ->
+ case application:get_env(ssl, ssl_pem_cache_clean) of
+ {ok, Interval} when is_integer(Interval) ->
+ Interval;
+ _ ->
+ ?CLEAR_PEM_CACHE
+ end.
+
+is_before_checkpoint(Time, CheckPoint) ->
+ calendar:datetime_to_gregorian_seconds(calendar:now_to_datetime(CheckPoint)) -
+ calendar:datetime_to_gregorian_seconds(Time) > 0.
+
+add_trusted_certs(Pid, Trustedcerts, Db) ->
+ try
+ ssl_pkix_db:add_trusted_certs(Pid, Trustedcerts, Db)
+ catch
+ _:Reason ->
+ {error, Reason}
+ end.
+
+session_cache(client, #state{session_cache_client = Cache}) ->
+ Cache;
+session_cache(server, #state{session_cache_server = Cache}) ->
+ Cache.
+
+crl_db_info([_,_,_,Local], {internal, Info}) ->
+ {Local, Info};
+crl_db_info(_, UserCRLDb) ->
+ UserCRLDb.
+
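
pem_check_interval/0 above reads the ssl_pem_cache_clean application environment
variable; a sketch of tuning it, with an arbitrarily chosen value in milliseconds:

    %% Validate the PEM file cache every two minutes instead of the default.
    %% Must be set before the ssl application is started to take effect.
    application:set_env(ssl, ssl_pem_cache_clean, 120000).
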
diff --git a/lib/ssl/src/ssl_pkix_db.erl b/lib/ssl/src/ssl_pkix_db.erl
index e59aba0618..d7b7e3eae3 100644
--- a/lib/ssl/src/ssl_pkix_db.erl
+++ b/lib/ssl/src/ssl_pkix_db.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -27,9 +27,9 @@
-include_lib("public_key/include/public_key.hrl").
-include_lib("kernel/include/file.hrl").
--export([create/0, remove/1, add_trusted_certs/3,
+-export([create/0, add_crls/3, remove_crls/2, remove/1, add_trusted_certs/3,
remove_trusted_certs/2, insert/3, remove/2, clear/1, db_size/1,
- ref_count/3, lookup_trusted_cert/4, foldl/3,
+ ref_count/3, lookup_trusted_cert/4, foldl/3, select_cert_by_issuer/2,
lookup_cached_pem/2, cache_pem_file/2, cache_pem_file/3,
lookup/2]).
@@ -51,16 +51,24 @@ create() ->
ets:new(ssl_otp_cacertificate_db, [set, public]),
%% Let connection processes call ref_count/3 directly
ets:new(ssl_otp_ca_file_ref, [set, public]),
- ets:new(ssl_otp_pem_cache, [set, protected])
+ ets:new(ssl_otp_pem_cache, [set, protected]),
+ %% Default cache
+ {ets:new(ssl_otp_crl_cache, [set, protected]),
+ ets:new(ssl_otp_crl_issuer_mapping, [bag, protected])}
].
%%--------------------------------------------------------------------
--spec remove([db_handle()]) -> ok.
+-spec remove([db_handle()]) -> ok.
%%
%% Description: Removes database db
%%--------------------------------------------------------------------
remove(Dbs) ->
- lists:foreach(fun(Db) ->
+ lists:foreach(fun({Db0, Db1}) ->
+ true = ets:delete(Db0),
+ true = ets:delete(Db1);
+ (undefined) ->
+ ok;
+ (Db) ->
true = ets:delete(Db)
end, Dbs).
@@ -81,10 +89,10 @@ lookup_trusted_cert(DbHandle, Ref, SerialNumber, Issuer) ->
{ok, Certs}
end.
-lookup_cached_pem([_, _, PemChache], MD5) ->
- lookup_cached_pem(PemChache, MD5);
-lookup_cached_pem(PemChache, MD5) ->
- lookup(MD5, PemChache).
+lookup_cached_pem([_, _, PemChache | _], File) ->
+ lookup_cached_pem(PemChache, File);
+lookup_cached_pem(PemChache, File) ->
+ lookup(File, PemChache).
%%--------------------------------------------------------------------
-spec add_trusted_certs(pid(), {erlang:timestamp(), string()} |
@@ -94,42 +102,42 @@ lookup_cached_pem(PemChache, MD5) ->
%% runtime database. Returns Ref that should be handed to lookup_trusted_cert
%% together with the cert serialnumber and issuer.
%%--------------------------------------------------------------------
-add_trusted_certs(_Pid, {der, DerList}, [CerDb, _,_]) ->
+add_trusted_certs(_Pid, {der, DerList}, [CertDb, _,_ | _]) ->
NewRef = make_ref(),
- add_certs_from_der(DerList, NewRef, CerDb),
+ add_certs_from_der(DerList, NewRef, CertDb),
{ok, NewRef};
-add_trusted_certs(_Pid, File, [CertsDb, RefDb, PemChache] = Db) ->
- MD5 = crypto:hash(md5, File),
- case lookup_cached_pem(Db, MD5) of
+add_trusted_certs(_Pid, File, [CertsDb, RefDb, PemChache | _] = Db) ->
+ case lookup_cached_pem(Db, File) of
[{_Content, Ref}] ->
ref_count(Ref, RefDb, 1),
{ok, Ref};
[Content] ->
Ref = make_ref(),
update_counter(Ref, 1, RefDb),
- insert(MD5, {Content, Ref}, PemChache),
+ insert(File, {Content, Ref}, PemChache),
add_certs_from_pem(Content, Ref, CertsDb),
{ok, Ref};
undefined ->
- new_trusted_cert_entry({MD5, File}, Db)
+ new_trusted_cert_entry(File, Db)
end.
%%--------------------------------------------------------------------
%%
%% Description: Cache file as binary in DB
%%--------------------------------------------------------------------
--spec cache_pem_file({binary(), binary()}, [db_handle()]) -> {ok, term()}.
-cache_pem_file({MD5, File}, [_CertsDb, _RefDb, PemChache]) ->
+-spec cache_pem_file(binary(), [db_handle()]) -> {ok, term()}.
+cache_pem_file(File, [_CertsDb, _RefDb, PemChache | _]) ->
{ok, PemBin} = file:read_file(File),
Content = public_key:pem_decode(PemBin),
- insert(MD5, Content, PemChache),
+ insert(File, Content, PemChache),
{ok, Content}.
--spec cache_pem_file(reference(), {binary(), binary()}, [db_handle()]) -> {ok, term()}.
-cache_pem_file(Ref, {MD5, File}, [_CertsDb, _RefDb, PemChache]) ->
+
+-spec cache_pem_file(reference(), binary(), [db_handle()]) -> {ok, term()}.
+cache_pem_file(Ref, File, [_CertsDb, _RefDb, PemChache| _]) ->
{ok, PemBin} = file:read_file(File),
Content = public_key:pem_decode(PemBin),
- insert(MD5, {Content, Ref}, PemChache),
+ insert(File, {Content, Ref}, PemChache),
{ok, Content}.
%%--------------------------------------------------------------------
@@ -150,6 +158,15 @@ remove(Key, Db) ->
ok.
%%--------------------------------------------------------------------
+-spec remove(term(), term(), db_handle()) -> ok.
+%%
+%% Description: Removes an element in a <Db>.
+%%--------------------------------------------------------------------
+remove(Key, Data, Db) ->
+ ets:delete_object(Db, {Key, Data}),
+ ok.
+
+%%--------------------------------------------------------------------
-spec lookup(term(), db_handle()) -> [term()] | undefined.
%%
%% Description: Looks up an element in a <Db>.
@@ -176,6 +193,10 @@ lookup(Key, Db) ->
foldl(Fun, Acc0, Cache) ->
ets:foldl(Fun, Acc0, Cache).
+
+select_cert_by_issuer(Cache, Issuer) ->
+ ets:select(Cache, [{{{'_','_', Issuer},{'_', '$1'}},[],['$$']}]).
+
%%--------------------------------------------------------------------
-spec ref_count(term(), db_handle(), integer()) -> integer().
%%
@@ -245,9 +266,39 @@ add_certs(Cert, Ref, CertsDb) ->
error_logger:info_report(Report)
end.
-new_trusted_cert_entry(FileRef, [CertsDb, RefDb, _] = Db) ->
+new_trusted_cert_entry(File, [CertsDb, RefDb, _ | _] = Db) ->
Ref = make_ref(),
update_counter(Ref, 1, RefDb),
- {ok, Content} = cache_pem_file(Ref, FileRef, Db),
+ {ok, Content} = cache_pem_file(Ref, File, Db),
add_certs_from_pem(Content, Ref, CertsDb),
{ok, Ref}.
+
+add_crls([_,_,_, {_, Mapping} | _], ?NO_DIST_POINT, CRLs) ->
+ [add_crls(CRL, Mapping) || CRL <- CRLs];
+add_crls([_,_,_, {Cache, Mapping} | _], Path, CRLs) ->
+ insert(Path, CRLs, Cache),
+ [add_crls(CRL, Mapping) || CRL <- CRLs].
+
+add_crls(CRL, Mapping) ->
+ insert(crl_issuer(CRL), CRL, Mapping).
+
+remove_crls([_,_,_, {_, Mapping} | _], {?NO_DIST_POINT, CRLs}) ->
+ [rm_crls(CRL, Mapping) || CRL <- CRLs];
+
+remove_crls([_,_,_, {Cache, Mapping} | _], Path) ->
+ case lookup(Path, Cache) of
+ undefined ->
+ ok;
+ CRLs ->
+ remove(Path, Cache),
+ [rm_crls(CRL, Mapping) || CRL <- CRLs]
+ end.
+
+rm_crls(CRL, Mapping) ->
+ remove(crl_issuer(CRL), CRL, Mapping).
+
+crl_issuer(DerCRL) ->
+ CRL = public_key:der_decode('CertificateList', DerCRL),
+ TBSCRL = CRL#'CertificateList'.tbsCertList,
+ TBSCRL#'TBSCertList'.issuer.
+
diff --git a/lib/ssl/src/ssl_record.erl b/lib/ssl/src/ssl_record.erl
index 63fc57edad..a02375a947 100644
--- a/lib/ssl/src/ssl_record.erl
+++ b/lib/ssl/src/ssl_record.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2013-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2013-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -48,7 +48,7 @@
-export([compress/3, uncompress/3, compressions/0]).
%% Payload encryption/decryption
--export([cipher/4, decipher/3, is_correct_mac/2,
+-export([cipher/4, decipher/4, is_correct_mac/2,
cipher_aead/4, decipher_aead/4]).
-export_type([ssl_version/0, ssl_atom_version/0]).
@@ -396,7 +396,7 @@ cipher_aead(Version, Fragment,
{CipherFragment, WriteState0#connection_state{cipher_state = CipherS1}}.
%%--------------------------------------------------------------------
--spec decipher(ssl_version(), binary(), #connection_state{}) -> {binary(), binary(), #connection_state{}} | #alert{}.
+-spec decipher(ssl_version(), binary(), #connection_state{}, boolean()) -> {binary(), binary(), #connection_state{}} | #alert{}.
%%
%% Description: Payload decryption
%%--------------------------------------------------------------------
@@ -406,8 +406,8 @@ decipher(Version, CipherFragment,
BulkCipherAlgo,
hash_size = HashSz},
cipher_state = CipherS0
- } = ReadState) ->
- case ssl_cipher:decipher(BulkCipherAlgo, HashSz, CipherS0, CipherFragment, Version) of
+ } = ReadState, PaddingCheck) ->
+ case ssl_cipher:decipher(BulkCipherAlgo, HashSz, CipherS0, CipherFragment, Version, PaddingCheck) of
{PlainFragment, Mac, CipherS1} ->
CS1 = ReadState#connection_state{cipher_state = CipherS1},
{PlainFragment, Mac, CS1};
diff --git a/lib/ssl/src/ssl_v3.erl b/lib/ssl/src/ssl_v3.erl
index 68f7f5dee2..169b39be32 100644
--- a/lib/ssl/src/ssl_v3.erl
+++ b/lib/ssl/src/ssl_v3.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -143,9 +143,6 @@ suites() ->
?TLS_DHE_RSA_WITH_AES_128_CBC_SHA,
?TLS_DHE_DSS_WITH_AES_128_CBC_SHA,
?TLS_RSA_WITH_AES_128_CBC_SHA,
- %%?TLS_RSA_WITH_IDEA_CBC_SHA,
- ?TLS_RSA_WITH_RC4_128_SHA,
- ?TLS_RSA_WITH_RC4_128_MD5,
?TLS_RSA_WITH_DES_CBC_SHA
].
diff --git a/lib/ssl/src/tls_connection.erl b/lib/ssl/src/tls_connection.erl
index 7df73fb581..77d3aa7889 100644
--- a/lib/ssl/src/tls_connection.erl
+++ b/lib/ssl/src/tls_connection.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -482,8 +482,9 @@ next_record(#state{protocol_buffers = #protocol_buffers{tls_packets = [], tls_ci
next_record(#state{protocol_buffers =
#protocol_buffers{tls_packets = [], tls_cipher_texts = [CT | Rest]}
= Buffers,
- connection_states = ConnStates0} = State) ->
- case tls_record:decode_cipher_text(CT, ConnStates0) of
+ connection_states = ConnStates0,
+ ssl_options = #ssl_options{padding_check = Check}} = State) ->
+ case tls_record:decode_cipher_text(CT, ConnStates0, Check) of
{Plain, ConnStates} ->
{Plain, State#state{protocol_buffers =
Buffers#protocol_buffers{tls_cipher_texts = Rest},
diff --git a/lib/ssl/src/tls_handshake.erl b/lib/ssl/src/tls_handshake.erl
index 183cabcfcd..b0b6d5a8e3 100644
--- a/lib/ssl/src/tls_handshake.erl
+++ b/lib/ssl/src/tls_handshake.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -28,6 +28,7 @@
-include("tls_record.hrl").
-include("ssl_alert.hrl").
-include("ssl_internal.hrl").
+-include("ssl_cipher.hrl").
-include_lib("public_key/include/public_key.hrl").
-export([client_hello/8, hello/4,
@@ -47,22 +48,28 @@
%%--------------------------------------------------------------------
client_hello(Host, Port, ConnectionStates,
#ssl_options{versions = Versions,
- ciphers = UserSuites
+ ciphers = UserSuites,
+ fallback = Fallback
} = SslOpts,
Cache, CacheCb, Renegotiation, OwnCert) ->
Version = tls_record:highest_protocol_version(Versions),
Pending = ssl_record:pending_connection_state(ConnectionStates, read),
SecParams = Pending#connection_state.security_parameters,
- CipherSuites = ssl_handshake:available_suites(UserSuites, Version),
+ AvailableCipherSuites = ssl_handshake:available_suites(UserSuites, Version),
Extensions = ssl_handshake:client_hello_extensions(Host, Version,
- CipherSuites,
+ AvailableCipherSuites,
SslOpts, ConnectionStates, Renegotiation),
-
- Id = ssl_session:client_id({Host, Port, SslOpts}, Cache, CacheCb, OwnCert),
-
+ CipherSuites =
+ case Fallback of
+ true ->
+ [?TLS_FALLBACK_SCSV | ssl_handshake:cipher_suites(AvailableCipherSuites, Renegotiation)];
+ false ->
+ ssl_handshake:cipher_suites(AvailableCipherSuites, Renegotiation)
+ end,
+ Id = ssl_session:client_id({Host, Port, SslOpts}, Cache, CacheCb, OwnCert),
#client_hello{session_id = Id,
client_version = Version,
- cipher_suites = ssl_handshake:cipher_suites(CipherSuites, Renegotiation),
+ cipher_suites = CipherSuites,
compression_methods = ssl_record:compressions(),
random = SecParams#security_parameters.client_random,
extensions = Extensions
@@ -96,33 +103,22 @@ hello(#server_hello{server_version = Version, random = Random,
end;
hello(#client_hello{client_version = ClientVersion,
- session_id = SugesstedId,
- cipher_suites = CipherSuites,
- compression_methods = Compressions,
- random = Random,
- extensions = #hello_extensions{elliptic_curves = Curves} = HelloExt},
+ cipher_suites = CipherSuites} = Hello,
#ssl_options{versions = Versions} = SslOpts,
- {Port, Session0, Cache, CacheCb, ConnectionStates0, Cert}, Renegotiation) ->
+ Info, Renegotiation) ->
Version = ssl_handshake:select_version(tls_record, ClientVersion, Versions),
- case tls_record:is_acceptable_version(Version, Versions) of
- true ->
- ECCCurve = ssl_handshake:select_curve(Curves, ssl_handshake:supported_ecc(Version)),
- {Type, #session{cipher_suite = CipherSuite} = Session1}
- = ssl_handshake:select_session(SugesstedId, CipherSuites, Compressions,
- Port, Session0#session{ecc = ECCCurve}, Version,
- SslOpts, Cache, CacheCb, Cert),
- case CipherSuite of
- no_suite ->
- ?ALERT_REC(?FATAL, ?INSUFFICIENT_SECURITY);
- _ ->
- handle_client_hello_extensions(Version, Type, Random, CipherSuites, HelloExt,
- SslOpts, Session1, ConnectionStates0,
- Renegotiation)
+ case ssl_cipher:is_fallback(CipherSuites) of
+ true ->
+ Highest = tls_record:highest_protocol_version(Versions),
+ case tls_record:is_higher(Highest, Version) of
+ true ->
+ ?ALERT_REC(?FATAL, ?INAPPROPRIATE_FALLBACK);
+ false ->
+ handle_client_hello(Version, Hello, SslOpts, Info, Renegotiation)
end;
false ->
- ?ALERT_REC(?FATAL, ?PROTOCOL_VERSION)
+ handle_client_hello(Version, Hello, SslOpts, Info, Renegotiation)
end.
-
%%--------------------------------------------------------------------
-spec encode_handshake(tls_handshake(), tls_record:tls_version()) -> iolist().
%%
@@ -149,6 +145,32 @@ get_tls_handshake(Version, Data, Buffer) ->
%%--------------------------------------------------------------------
%%% Internal functions
%%--------------------------------------------------------------------
+handle_client_hello(Version, #client_hello{session_id = SugesstedId,
+ cipher_suites = CipherSuites,
+ compression_methods = Compressions,
+ random = Random,
+ extensions = #hello_extensions{elliptic_curves = Curves} = HelloExt},
+ #ssl_options{versions = Versions} = SslOpts,
+ {Port, Session0, Cache, CacheCb, ConnectionStates0, Cert}, Renegotiation) ->
+ case tls_record:is_acceptable_version(Version, Versions) of
+ true ->
+ ECCCurve = ssl_handshake:select_curve(Curves, ssl_handshake:supported_ecc(Version)),
+ {Type, #session{cipher_suite = CipherSuite} = Session1}
+ = ssl_handshake:select_session(SugesstedId, CipherSuites, Compressions,
+ Port, Session0#session{ecc = ECCCurve}, Version,
+ SslOpts, Cache, CacheCb, Cert),
+ case CipherSuite of
+ no_suite ->
+ ?ALERT_REC(?FATAL, ?INSUFFICIENT_SECURITY);
+ _ ->
+ handle_client_hello_extensions(Version, Type, Random, CipherSuites, HelloExt,
+ SslOpts, Session1, ConnectionStates0,
+ Renegotiation)
+ end;
+ false ->
+ ?ALERT_REC(?FATAL, ?PROTOCOL_VERSION)
+ end.
+
get_tls_handshake_aux(Version, <<?BYTE(Type), ?UINT24(Length),
Body:Length/binary,Rest/binary>>, Acc) ->
Raw = <<?BYTE(Type), ?UINT24(Length), Body/binary>>,
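
client_hello/8 above prepends ?TLS_FALLBACK_SCSV when the fallback option is set;
a hedged sketch of a downgrade retry, assuming the option is exposed through
ssl:connect/3 and using a placeholder host:

    %% Try the highest version first; on failure retry with a lower version
    %% and signal the downgrade, so a server aware of the SCSV can reject it.
    case ssl:connect("server.example", 443, [{versions, ['tlsv1.2']}]) of
        {ok, Socket} ->
            {ok, Socket};
        {error, _Reason} ->
            ssl:connect("server.example", 443,
                        [{versions, [tlsv1]}, {fallback, true}])
    end.
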
diff --git a/lib/ssl/src/tls_record.erl b/lib/ssl/src/tls_record.erl
index 544d200f70..14a49ac7da 100644
--- a/lib/ssl/src/tls_record.erl
+++ b/lib/ssl/src/tls_record.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -34,14 +34,14 @@
-export([get_tls_records/2]).
%% Decoding
--export([decode_cipher_text/2]).
+-export([decode_cipher_text/3]).
%% Encoding
-export([encode_plain_text/4]).
%% Protocol version handling
-export([protocol_version/1, lowest_protocol_version/2,
- highest_protocol_version/1, supported_protocol_versions/0,
+ highest_protocol_version/1, is_higher/2, supported_protocol_versions/0,
is_acceptable_version/1, is_acceptable_version/2]).
-export_type([tls_version/0, tls_atom_version/0]).
@@ -159,7 +159,7 @@ encode_plain_text(Type, Version, Data,
{CipherText, ConnectionStates#connection_states{current_write = WriteState#connection_state{sequence_number = Seq +1}}}.
%%--------------------------------------------------------------------
--spec decode_cipher_text(#ssl_tls{}, #connection_states{}) ->
+-spec decode_cipher_text(#ssl_tls{}, #connection_states{}, boolean()) ->
{#ssl_tls{}, #connection_states{}}| #alert{}.
%%
%% Description: Decode cipher text
@@ -174,7 +174,7 @@ decode_cipher_text(#ssl_tls{type = Type, version = Version,
#security_parameters{
cipher_type = ?AEAD,
compression_algorithm=CompAlg}
- } = ReadState0} = ConnnectionStates0) ->
+ } = ReadState0} = ConnnectionStates0, _) ->
AAD = calc_aad(Type, Version, ReadState0),
case ssl_record:decipher_aead(Version, CipherFragment, ReadState0, AAD) of
{PlainFragment, ReadState1} ->
@@ -197,8 +197,8 @@ decode_cipher_text(#ssl_tls{type = Type, version = Version,
sequence_number = Seq,
security_parameters=
#security_parameters{compression_algorithm=CompAlg}
- } = ReadState0} = ConnnectionStates0) ->
- case ssl_record:decipher(Version, CipherFragment, ReadState0) of
+ } = ReadState0} = ConnnectionStates0, PaddingCheck) ->
+ case ssl_record:decipher(Version, CipherFragment, ReadState0, PaddingCheck) of
{PlainFragment, Mac, ReadState1} ->
MacHash = calc_mac_hash(Type, Version, PlainFragment, ReadState1),
case ssl_record:is_correct_mac(Mac, MacHash) of
@@ -278,6 +278,13 @@ highest_protocol_version(Version = {M,_}, [{N,_} | Rest]) when M > N ->
highest_protocol_version(_, [Version | Rest]) ->
highest_protocol_version(Version, Rest).
+is_higher({M, N}, {M, O}) when N > O ->
+ true;
+is_higher({M, _}, {N, _}) when M > N ->
+ true;
+is_higher(_, _) ->
+ false.
+
%%--------------------------------------------------------------------
-spec supported_protocol_versions() -> [tls_version()].
%%
@@ -311,8 +318,17 @@ supported_protocol_versions([]) ->
Vsns;
supported_protocol_versions([_|_] = Vsns) ->
- Vsns.
-
+ case sufficient_tlsv1_2_crypto_support() of
+ true ->
+ Vsns;
+ false ->
+ case Vsns -- ['tlsv1.2'] of
+ [] ->
+ ?MIN_SUPPORTED_VERSIONS;
+ NewVsns ->
+ NewVsns
+ end
+ end.
%%--------------------------------------------------------------------
%%
%% Description: ssl version 2 is not acceptable security risks are too big.
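The new is_higher/2 orders {Major, Minor} record-protocol version tuples: with equal majors the larger minor wins, otherwise the larger major does, and equal versions are not considered higher. A minimal sketch of the expected results (illustrative only, not part of the patch):

    %% Same major version: the minor version decides.
    true  = tls_record:is_higher({3,3}, {3,2}),
    %% Different major version: the larger major wins.
    true  = tls_record:is_higher({4,0}, {3,3}),
    %% Equal or lower versions are not higher.
    false = tls_record:is_higher({3,1}, {3,1}),
    false = tls_record:is_higher({3,0}, {3,3}).
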
diff --git a/lib/ssl/src/tls_v1.erl b/lib/ssl/src/tls_v1.erl
index c4114278a4..559fc1d6a8 100644
--- a/lib/ssl/src/tls_v1.erl
+++ b/lib/ssl/src/tls_v1.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -208,15 +208,7 @@ suites(Minor) when Minor == 1; Minor == 2 ->
?TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA,
?TLS_ECDH_RSA_WITH_AES_128_CBC_SHA,
?TLS_RSA_WITH_AES_128_CBC_SHA,
-
- ?TLS_ECDHE_ECDSA_WITH_RC4_128_SHA,
- ?TLS_ECDHE_RSA_WITH_RC4_128_SHA,
- ?TLS_RSA_WITH_RC4_128_SHA,
- ?TLS_RSA_WITH_RC4_128_MD5,
?TLS_DHE_RSA_WITH_DES_CBC_SHA,
- ?TLS_ECDH_ECDSA_WITH_RC4_128_SHA,
- ?TLS_ECDH_RSA_WITH_RC4_128_SHA,
-
?TLS_RSA_WITH_DES_CBC_SHA
];
suites(3) ->
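With the RC4 suites dropped from the default list returned by suites/1, an application that still needs one has to ask for it explicitly through the ciphers option. A hedged sketch (host, port and the chosen suite are placeholders):

    %% Explicitly opt in to a single RC4 suite; RC4 is no longer offered by default.
    {ok, Socket} = ssl:connect("server.example.com", 443,
                               [{ciphers, [{rsa, rc4_128, sha}]},
                                {versions, ['tlsv1.2']}]),
    ssl:close(Socket).
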
diff --git a/lib/ssl/test/Makefile b/lib/ssl/test/Makefile
index 3639c2b2da..09cc5981e7 100644
--- a/lib/ssl/test/Makefile
+++ b/lib/ssl/test/Makefile
@@ -1,7 +1,7 @@
#
# %CopyrightBegin%
#
-# Copyright Ericsson AB 1999-2013. All Rights Reserved.
+# Copyright Ericsson AB 1999-2015. All Rights Reserved.
#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
@@ -47,9 +47,11 @@ MODULES = \
ssl_npn_handshake_SUITE \
ssl_packet_SUITE \
ssl_payload_SUITE \
+ ssl_pem_cache_SUITE \
ssl_session_cache_SUITE \
ssl_to_openssl_SUITE \
ssl_ECC_SUITE \
+ ssl_upgrade_SUITE\
make_certs\
erl_make_certs
diff --git a/lib/ssl/test/make_certs.erl b/lib/ssl/test/make_certs.erl
index 15a7e118ff..77631f62d3 100644
--- a/lib/ssl/test/make_certs.erl
+++ b/lib/ssl/test/make_certs.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2012. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -324,8 +324,9 @@ eval_cmd(Port, Cmd) ->
ok
end,
receive
- {Port, {exit_status, Status}} when Status /= 0 ->
- %% io:fwrite("exit status: ~w~n", [Status]),
+ {Port, {exit_status, 0}} ->
+ ok;
+ {Port, {exit_status, Status}} ->
exit({eval_cmd, Cmd, Status})
after 0 ->
ok
@@ -369,7 +370,7 @@ req_cnf(C) ->
"subjectKeyIdentifier = hash\n"
"subjectAltName = email:copy\n"].
-ca_cnf(C) ->
+ca_cnf(C = #config{issuing_distribution_point = true}) ->
["# Purpose: Configuration for CAs.\n"
"\n"
"ROOTDIR = $ENV::ROOTDIR\n"
@@ -446,5 +447,83 @@ ca_cnf(C) ->
"subjectAltName = email:copy\n"
"issuerAltName = issuer:copy\n"
"crlDistributionPoints=@crl_section\n"
- ].
+ ];
+ca_cnf(C = #config{issuing_distribution_point = false}) ->
+ ["# Purpose: Configuration for CAs.\n"
+ "\n"
+ "ROOTDIR = $ENV::ROOTDIR\n"
+ "default_ca = ca\n"
+ "\n"
+
+ "[ca]\n"
+ "dir = $ROOTDIR/", C#config.commonName, "\n"
+ "certs = $dir/certs\n"
+ "crl_dir = $dir/crl\n"
+ "database = $dir/index.txt\n"
+ "new_certs_dir = $dir/newcerts\n"
+ "certificate = $dir/cert.pem\n"
+ "serial = $dir/serial\n"
+ "crl = $dir/crl.pem\n",
+ ["crlnumber = $dir/crlnumber\n" || C#config.v2_crls],
+ "private_key = $dir/private/key.pem\n"
+ "RANDFILE = $dir/private/RAND\n"
+ "\n"
+ "x509_extensions = user_cert\n",
+ ["crl_extensions = crl_ext\n" || C#config.v2_crls],
+ "unique_subject = no\n"
+ "default_days = 3600\n"
+ "default_md = md5\n"
+ "preserve = no\n"
+ "policy = policy_match\n"
+ "\n"
+
+ "[policy_match]\n"
+ "commonName = supplied\n"
+ "organizationalUnitName = optional\n"
+ "organizationName = match\n"
+ "countryName = match\n"
+ "localityName = match\n"
+ "emailAddress = supplied\n"
+ "\n"
+
+ "[crl_ext]\n"
+ "authorityKeyIdentifier=keyid:always,issuer:always\n",
+ %["issuingDistributionPoint=critical, @idpsec\n" || C#config.issuing_distribution_point],
+
+ %"[idpsec]\n"
+ %"fullname=URI:http://localhost:8000/",C#config.commonName,"/crl.pem\n"
+
+ "[user_cert]\n"
+ "basicConstraints = CA:false\n"
+ "keyUsage = nonRepudiation, digitalSignature, keyEncipherment\n"
+ "subjectKeyIdentifier = hash\n"
+ "authorityKeyIdentifier = keyid,issuer:always\n"
+ "subjectAltName = email:copy\n"
+ "issuerAltName = issuer:copy\n"
+ %"crlDistributionPoints=@crl_section\n"
+
+ %%"[crl_section]\n"
+ %% intentionally invalid
+ %%"URI.1=http://localhost/",C#config.commonName,"/crl.pem\n"
+ %%"URI.2=http://localhost:",integer_to_list(C#config.crl_port),"/",C#config.commonName,"/crl.pem\n"
+ %%"\n"
+
+ "[user_cert_digital_signature_only]\n"
+ "basicConstraints = CA:false\n"
+ "keyUsage = digitalSignature\n"
+ "subjectKeyIdentifier = hash\n"
+ "authorityKeyIdentifier = keyid,issuer:always\n"
+ "subjectAltName = email:copy\n"
+ "issuerAltName = issuer:copy\n"
+ "\n"
+
+ "[ca_cert]\n"
+ "basicConstraints = critical,CA:true\n"
+ "keyUsage = cRLSign, keyCertSign\n"
+ "subjectKeyIdentifier = hash\n"
+ "authorityKeyIdentifier = keyid:always,issuer:always\n"
+ "subjectAltName = email:copy\n"
+ "issuerAltName = issuer:copy\n"
+ %"crlDistributionPoints=@crl_section\n"
+ ].
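The two ca_cnf/1 clauses are selected on the issuing_distribution_point field of #config{}, and optional configuration lines are emitted with the comprehension-over-a-boolean idiom used above. A small standalone illustration of that idiom (the function name is hypothetical):

    %% Yields the line when the flag is true, nothing otherwise.
    maybe_crlnumber(Flag) ->
        ["crlnumber = $dir/crlnumber\n" || Flag].

    %% maybe_crlnumber(true)  -> ["crlnumber = $dir/crlnumber\n"]
    %% maybe_crlnumber(false) -> []
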
diff --git a/lib/ssl/test/ssl_basic_SUITE.erl b/lib/ssl/test/ssl_basic_SUITE.erl
index dc9e8934e6..50d5fb411f 100644
--- a/lib/ssl/test/ssl_basic_SUITE.erl
+++ b/lib/ssl/test/ssl_basic_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -65,7 +65,7 @@ groups() ->
{'tlsv1.2', [], all_versions_groups()},
{'tlsv1.1', [], all_versions_groups()},
{'tlsv1', [], all_versions_groups() ++ rizzo_tests()},
- {'sslv3', [], all_versions_groups() ++ rizzo_tests()},
+ {'sslv3', [], all_versions_groups() ++ rizzo_tests() ++ [ciphersuite_vs_version]},
{api,[], api_tests()},
{session, [], session_tests()},
{renegotiate, [], renegotiate_tests()},
@@ -90,7 +90,9 @@ basic_tests() ->
version_option,
connect_twice,
connect_dist,
- clear_pem_cache
+ clear_pem_cache,
+ defaults,
+ fallback
].
options_tests() ->
@@ -116,7 +118,6 @@ options_tests() ->
tcp_reuseaddr,
honor_server_cipher_order,
honor_client_cipher_order,
- ciphersuite_vs_version,
unordered_protocol_versions_server,
unordered_protocol_versions_client
].
@@ -177,6 +178,9 @@ cipher_tests() ->
srp_cipher_suites,
srp_anon_cipher_suites,
srp_dsa_cipher_suites,
+ rc4_rsa_cipher_suites,
+ rc4_ecdh_rsa_cipher_suites,
+ rc4_ecdsa_cipher_suites,
default_reject_anonymous].
cipher_tests_ec() ->
@@ -256,11 +260,6 @@ init_per_testcase(Case, Config) when Case == unordered_protocol_versions_client
_ ->
{skip, "TLS 1.2 need but not supported on this platform"}
end;
-init_per_testcase(no_authority_key_identifier, Config) ->
- %% Clear cach so that root cert will not
- %% be found.
- ssl:clear_pem_cache(),
- Config;
init_per_testcase(protocol_versions, Config) ->
ssl:stop(),
@@ -286,6 +285,14 @@ init_per_testcase(empty_protocol_versions, Config) ->
ssl:start(),
Config;
+init_per_testcase(fallback, Config) ->
+ case tls_record:highest_protocol_version([]) of
+ {3, N} when N > 1 ->
+ Config;
+ _ ->
+ {skip, "Not relevant if highest supported version is less than 3.2"}
+ end;
+
%% init_per_testcase(different_ca_peer_sign, Config0) ->
%% ssl_test_lib:make_mix_cert(Config0);
@@ -343,7 +350,7 @@ alerts(Config) when is_list(Config) ->
end, Alerts).
%%--------------------------------------------------------------------
new_options_in_accept() ->
- [{doc,"Test that you can set ssl options in ssl_accept/3 and not tcp upgrade"}].
+ [{doc,"Test that you can set ssl options in ssl_accept/3 and not only in tcp upgrade"}].
new_options_in_accept(Config) when is_list(Config) ->
ClientOpts = ?config(client_opts, Config),
ServerOpts0 = ?config(server_dsa_opts, Config),
@@ -361,7 +368,9 @@ new_options_in_accept(Config) when is_list(Config) ->
{host, Hostname},
{from, self()},
{mfa, {?MODULE, connection_info_result, []}},
- {options, [{versions, [sslv3]} | ClientOpts]}]),
+ {options, [{versions, [sslv3]},
+ {ciphers,[{rsa,rc4_128,sha}
+ ]} | ClientOpts]}]),
ct:log("Testcase ~p, Client ~p Server ~p ~n",
[self(), Client, Server]),
@@ -391,7 +400,7 @@ connection_info(Config) when is_list(Config) ->
{from, self()},
{mfa, {?MODULE, connection_info_result, []}},
{options,
- [{ciphers,[{rsa,rc4_128,sha,no_export}]} |
+ [{ciphers,[{rsa,des_cbc,sha,no_export}]} |
ClientOpts]}]),
ct:log("Testcase ~p, Client ~p Server ~p ~n",
@@ -400,7 +409,7 @@ connection_info(Config) when is_list(Config) ->
Version =
tls_record:protocol_version(tls_record:highest_protocol_version([])),
- ServerMsg = ClientMsg = {ok, {Version, {rsa,rc4_128,sha}}},
+ ServerMsg = ClientMsg = {ok, {Version, {rsa, des_cbc, sha}}},
ssl_test_lib:check_result(Server, ServerMsg, Client, ClientMsg),
@@ -629,7 +638,7 @@ clear_pem_cache(Config) when is_list(Config) ->
{status, _, _, StatusInfo} = sys:get_status(whereis(ssl_manager)),
[_, _,_, _, Prop] = StatusInfo,
State = ssl_test_lib:state(Prop),
- [_,FilRefDb, _] = element(6, State),
+ [_,FilRefDb |_] = element(6, State),
{Server, Client} = basic_verify_test_no_close(Config),
2 = ets:info(FilRefDb, size),
ssl:clear_pem_cache(),
@@ -648,6 +657,34 @@ clear_pem_cache(Config) when is_list(Config) ->
0 = ets:info(FilRefDb, size).
%%--------------------------------------------------------------------
+
+fallback() ->
+ [{doc, "Test TLS_FALLBACK_SCSV downgrade prevention"}].
+
+fallback(Config) when is_list(Config) ->
+ ClientOpts = ?config(client_opts, Config),
+ ServerOpts = ?config(server_opts, Config),
+ {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
+
+ Server =
+ ssl_test_lib:start_server_error([{node, ServerNode}, {port, 0},
+ {from, self()},
+ {options, ServerOpts}]),
+
+ Port = ssl_test_lib:inet_port(Server),
+
+ Client =
+ ssl_test_lib:start_client_error([{node, ClientNode},
+ {port, Port}, {host, Hostname},
+ {from, self()}, {options,
+ [{fallback, true},
+ {versions, ['tlsv1']}
+ | ClientOpts]}]),
+
+ ssl_test_lib:check_result(Server, {error,{tls_alert,"inappropriate fallback"}},
+ Client, {error,{tls_alert,"inappropriate fallback"}}).
+
+%%--------------------------------------------------------------------
peername() ->
[{doc,"Test API function peername/1"}].
@@ -1779,6 +1816,32 @@ srp_dsa_cipher_suites(Config) when is_list(Config) ->
Version = tls_record:protocol_version(tls_record:highest_protocol_version([])),
Ciphers = ssl_test_lib:srp_dss_suites(),
run_suites(Ciphers, Version, Config, srp_dsa).
+%%-------------------------------------------------------------------
+rc4_rsa_cipher_suites() ->
+ [{doc, "Test the RC4 cipher suites with RSA key exchange"}].
+rc4_rsa_cipher_suites(Config) when is_list(Config) ->
+ NVersion = tls_record:highest_protocol_version([]),
+ Version = tls_record:protocol_version(NVersion),
+ Ciphers = ssl_test_lib:rc4_suites(NVersion),
+ run_suites(Ciphers, Version, Config, rc4_rsa).
+%%-------------------------------------------------------------------
+rc4_ecdh_rsa_cipher_suites() ->
+ [{doc, "Test the RC4 cipher suites with ECDH-RSA key exchange"}].
+rc4_ecdh_rsa_cipher_suites(Config) when is_list(Config) ->
+ NVersion = tls_record:highest_protocol_version([]),
+ Version = tls_record:protocol_version(NVersion),
+ Ciphers = ssl_test_lib:rc4_suites(NVersion),
+ run_suites(Ciphers, Version, Config, rc4_ecdh_rsa).
+
+%%-------------------------------------------------------------------
+rc4_ecdsa_cipher_suites() ->
+ [{doc, "Test the RC4 cipher suites with ECDSA certificates"}].
+rc4_ecdsa_cipher_suites(Config) when is_list(Config) ->
+ NVersion = tls_record:highest_protocol_version([]),
+ Version = tls_record:protocol_version(NVersion),
+ Ciphers = ssl_test_lib:rc4_suites(NVersion),
+ run_suites(Ciphers, Version, Config, rc4_ecdsa).
+
%%--------------------------------------------------------------------
default_reject_anonymous()->
[{doc,"Test that by default anonymous cipher suites are rejected "}].
@@ -2507,6 +2570,16 @@ no_reuses_session_server_restart_new_cert_file(Config) when is_list(Config) ->
ssl_test_lib:close(Client1).
%%--------------------------------------------------------------------
+defaults(Config) when is_list(Config)->
+ [_,
+ {supported, Supported},
+ {available, Available}]
+ = ssl:versions(),
+ true = lists:member(sslv3, Available),
+ false = lists:member(sslv3, Supported),
+ false = lists:member({rsa,rc4_128,sha}, ssl:cipher_suites()),
+ true = lists:member({rsa,rc4_128,sha}, ssl:cipher_suites(all)).
+%%--------------------------------------------------------------------
reuseaddr() ->
[{doc,"Test reuseaddr option"}].
@@ -2631,6 +2704,8 @@ honor_cipher_order(Config, Honor, ServerCiphers, ClientCiphers, Expected) ->
ssl_test_lib:close(Client).
%%--------------------------------------------------------------------
+ciphersuite_vs_version() ->
+ [{doc,"Test a SSLv3 client can not negotiate a TLSv* cipher suite."}].
ciphersuite_vs_version(Config) when is_list(Config) ->
{_ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
@@ -3694,8 +3769,20 @@ run_suites(Ciphers, Version, Config, Type) ->
?config(server_ecdsa_opts, Config)};
ecdh_rsa ->
{?config(client_opts, Config),
- ?config(server_ecdh_rsa_opts, Config)}
- end,
+ ?config(server_ecdh_rsa_opts, Config)};
+ rc4_rsa ->
+ {?config(client_opts, Config),
+ [{ciphers, Ciphers} |
+ ?config(server_opts, Config)]};
+ rc4_ecdh_rsa ->
+ {?config(client_opts, Config),
+ [{ciphers, Ciphers} |
+ ?config(server_ecdh_rsa_opts, Config)]};
+ rc4_ecdsa ->
+ {?config(client_opts, Config),
+ [{ciphers, Ciphers} |
+ ?config(server_ecdsa_opts, Config)]}
+ end,
Result = lists:map(fun(Cipher) ->
cipher(Cipher, Version, Config, ClientOpts, ServerOpts) end,
@@ -3716,6 +3803,7 @@ erlang_cipher_suite(Suite) ->
cipher(CipherSuite, Version, Config, ClientOpts, ServerOpts) ->
%% process_flag(trap_exit, true),
ct:log("Testing CipherSuite ~p~n", [CipherSuite]),
+ ct:log("Server Opts ~p~n", [ServerOpts]),
{ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
ErlangCipherSuite = erlang_cipher_suite(CipherSuite),
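The new fallback test (fallback/1 above) exercises TLS_FALLBACK_SCSV: a client that sets {fallback, true} while offering a version lower than the server's highest must be rejected with an "inappropriate fallback" alert. A sketch of such a client call outside the test framework (host and port are placeholders):

    %% A fallback retry at TLS 1.0 against a TLS 1.2-capable server is expected
    %% to fail with the inappropriate fallback alert.
    {error, {tls_alert, "inappropriate fallback"}} =
        ssl:connect("server.example.com", 443,
                    [{fallback, true}, {versions, ['tlsv1']}], 5000).
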
diff --git a/lib/ssl/test/ssl_certificate_verify_SUITE.erl b/lib/ssl/test/ssl_certificate_verify_SUITE.erl
index b7864ba6e7..dab7a941db 100644
--- a/lib/ssl/test/ssl_certificate_verify_SUITE.erl
+++ b/lib/ssl/test/ssl_certificate_verify_SUITE.erl
@@ -443,7 +443,7 @@ verify_fun_always_run_client(Config) when is_list(Config) ->
{unknown, UserState};
(_, valid, [ChainLen]) ->
{valid, [ChainLen + 1]};
- (_, valid_peer, [2]) ->
+ (_, valid_peer, [1]) ->
{fail, "verify_fun_was_always_run"};
(_, valid_peer, UserState) ->
{valid, UserState}
@@ -482,7 +482,7 @@ verify_fun_always_run_server(Config) when is_list(Config) ->
{unknown, UserState};
(_, valid, [ChainLen]) ->
{valid, [ChainLen + 1]};
- (_, valid_peer, [2]) ->
+ (_, valid_peer, [1]) ->
{fail, "verify_fun_was_always_run"};
(_, valid_peer, UserState) ->
{valid, UserState}
diff --git a/lib/ssl/test/ssl_cipher_SUITE.erl b/lib/ssl/test/ssl_cipher_SUITE.erl
index f2dc1b52c1..3433f9a445 100644
--- a/lib/ssl/test/ssl_cipher_SUITE.erl
+++ b/lib/ssl/test/ssl_cipher_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -38,7 +38,7 @@
suite() -> [{ct_hooks,[ts_install_cth]}].
all() ->
- [aes_decipher_good, aes_decipher_good_tls11, aes_decipher_fail, aes_decipher_fail_tls11].
+ [aes_decipher_good, aes_decipher_fail, padding_test].
groups() ->
[].
@@ -73,93 +73,122 @@ end_per_testcase(_TestCase, Config) ->
%% Test Cases --------------------------------------------------------
%%--------------------------------------------------------------------
aes_decipher_good() ->
- [{doc,"Decipher a known cryptotext."}].
+ [{doc,"Decipher a known cryptotext using a correct key"}].
aes_decipher_good(Config) when is_list(Config) ->
HashSz = 32,
- CipherState = #cipher_state{iv = <<59,201,85,117,188,206,224,136,5,109,46,70,104,79,4,9>>,
- key = <<72,196,247,97,62,213,222,109,210,204,217,186,172,184,197,148>>},
- Fragment = <<220,193,179,139,171,33,143,245,202,47,123,251,13,232,114,8,
- 190,162,74,31,186,227,119,155,94,74,119,79,169,193,240,160,
- 198,181,81,19,98,162,213,228,74,224,253,168,156,59,195,122,
- 108,101,107,242,20,15,169,150,163,107,101,94,93,104,241,165>>,
- Content = <<183,139,16,132,10,209,67,86,168,100,61,217,145,57,36,56, "HELLO\n">>,
- Mac = <<71,136,212,107,223,200,70,232,127,116,148,205,232,35,158,113,237,174,15,217,192,168,35,8,6,107,107,233,25,174,90,111>>,
- Version = {3,0},
- {Content, Mac, _} = ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, Fragment, Version),
- Version1 = {3,1},
- {Content, Mac, _} = ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, Fragment, Version1),
- ok.
+ CipherState = correct_cipher_state(),
+ decipher_check_good(HashSz, CipherState, {3,0}),
+ decipher_check_good(HashSz, CipherState, {3,1}),
+ decipher_check_good(HashSz, CipherState, {3,2}),
+ decipher_check_good(HashSz, CipherState, {3,3}).
%%--------------------------------------------------------------------
-
-aes_decipher_good_tls11() ->
- [{doc,"Decipher a known TLS 1.1 cryptotext."}].
-
-%% the fragment is actuall a TLS 1.1 record, with
-%% Version = TLS 1.1, we get the correct NextIV in #cipher_state
-aes_decipher_good_tls11(Config) when is_list(Config) ->
- HashSz = 32,
- CipherState = #cipher_state{iv = <<59,201,85,117,188,206,224,136,5,109,46,70,104,79,4,9>>,
- key = <<72,196,247,97,62,213,222,109,210,204,217,186,172,184,197,148>>},
- Fragment = <<220,193,179,139,171,33,143,245,202,47,123,251,13,232,114,8,
- 190,162,74,31,186,227,119,155,94,74,119,79,169,193,240,160,
- 198,181,81,19,98,162,213,228,74,224,253,168,156,59,195,122,
- 108,101,107,242,20,15,169,150,163,107,101,94,93,104,241,165>>,
- Content = <<"HELLO\n">>,
- NextIV = <<183,139,16,132,10,209,67,86,168,100,61,217,145,57,36,56>>,
- Mac = <<71,136,212,107,223,200,70,232,127,116,148,205,232,35,158,113,237,174,15,217,192,168,35,8,6,107,107,233,25,174,90,111>>,
- Version = {3,2},
- {Content, Mac, #cipher_state{iv = NextIV}} = ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, Fragment, Version),
- Version1 = {3,2},
- {Content, Mac, #cipher_state{iv = NextIV}} = ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, Fragment, Version1),
- ok.
-
-%%--------------------------------------------------------------------
-
aes_decipher_fail() ->
- [{doc,"Decipher a known cryptotext."}].
+ [{doc,"Decipher a known cryptotext using a incorrect key"}].
-%% same as above, last byte of key replaced
aes_decipher_fail(Config) when is_list(Config) ->
HashSz = 32,
- CipherState = #cipher_state{iv = <<59,201,85,117,188,206,224,136,5,109,46,70,104,79,4,9>>,
- key = <<72,196,247,97,62,213,222,109,210,204,217,186,172,184,197,254>>},
- Fragment = <<220,193,179,139,171,33,143,245,202,47,123,251,13,232,114,8,
- 190,162,74,31,186,227,119,155,94,74,119,79,169,193,240,160,
- 198,181,81,19,98,162,213,228,74,224,253,168,156,59,195,122,
- 108,101,107,242,20,15,169,150,163,107,101,94,93,104,241,165>>,
- Version = {3,0},
- {Content, Mac, _} = ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, Fragment, Version),
- 32 = byte_size(Content),
- 32 = byte_size(Mac),
- Version1 = {3,1},
- {Content1, Mac1, _} = ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, Fragment, Version1),
- 32 = byte_size(Content1),
- 32 = byte_size(Mac1),
- ok.
+ CipherState = incorrect_cipher_state(),
+ decipher_check_fail(HashSz, CipherState, {3,0}),
+ decipher_check_fail(HashSz, CipherState, {3,1}),
+ decipher_check_fail(HashSz, CipherState, {3,2}),
+ decipher_check_fail(HashSz, CipherState, {3,3}).
%%--------------------------------------------------------------------
-
-aes_decipher_fail_tls11() ->
- [{doc,"Decipher a known TLS 1.1 cryptotext."}].
-
-%% same as above, last byte of key replaced
-%% stricter padding checks in TLS 1.1 mean we get an alert instead
-aes_decipher_fail_tls11(Config) when is_list(Config) ->
- HashSz = 32,
- CipherState = #cipher_state{iv = <<59,201,85,117,188,206,224,136,5,109,46,70,104,79,4,9>>,
- key = <<72,196,247,97,62,213,222,109,210,204,217,186,172,184,197,254>>},
- Fragment = <<220,193,179,139,171,33,143,245,202,47,123,251,13,232,114,8,
- 190,162,74,31,186,227,119,155,94,74,119,79,169,193,240,160,
- 198,181,81,19,98,162,213,228,74,224,253,168,156,59,195,122,
- 108,101,107,242,20,15,169,150,163,107,101,94,93,104,241,165>>,
- Version = {3,2},
- #alert{level = ?FATAL, description = ?BAD_RECORD_MAC} =
- ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, Fragment, Version),
- Version1 = {3,3},
- #alert{level = ?FATAL, description = ?BAD_RECORD_MAC} =
- ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, Fragment, Version1),
- ok.
-
+padding_test(Config) when is_list(Config) ->
+ HashSz = 16,
+ CipherState = correct_cipher_state(),
+ pad_test(HashSz, CipherState, {3,0}),
+ pad_test(HashSz, CipherState, {3,1}),
+ pad_test(HashSz, CipherState, {3,2}),
+ pad_test(HashSz, CipherState, {3,3}).
+
+%%--------------------------------------------------------------------
+% Internal functions --------------------------------------------------------
%%--------------------------------------------------------------------
+decipher_check_good(HashSz, CipherState, Version) ->
+ {Content, NextIV, Mac} = content_nextiv_mac(Version),
+ {Content, Mac, _} =
+ ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, aes_fragment(Version), Version, true).
+
+decipher_check_fail(HashSz, CipherState, Version) ->
+ {Content, NextIV, Mac} = content_nextiv_mac(Version),
+ true = {Content, Mac, #cipher_state{iv = NextIV}} =/=
+ ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, aes_fragment(Version), Version, true).
+
+pad_test(HashSz, CipherState, {3,0} = Version) ->
+ %% SSL 3.0 ({3,0}) does not have a padding check
+ {Content, NextIV, Mac} = badpad_content_nextiv_mac(Version),
+ {Content, Mac, #cipher_state{iv = NextIV}} =
+ ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, badpad_aes_fragment({3,0}), {3,0}, true),
+ {Content, Mac, #cipher_state{iv = NextIV}} =
+ ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, badpad_aes_fragment({3,0}), {3,0}, false);
+pad_test(HashSz, CipherState, {3,1} = Version) ->
+ %% TLS 1.0 ({3,1}) has a padding check, but it may be disabled
+ {Content, NextIV, Mac} = badpad_content_nextiv_mac(Version),
+ BadCont = badpad_content(Content),
+ {Content, Mac, #cipher_state{iv = NextIV}} =
+ ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, badpad_aes_fragment({3,1}) , {3,1}, false),
+ {BadCont, Mac, #cipher_state{iv = NextIV}} =
+ ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, badpad_aes_fragment({3,1}), {3,1}, true);
+pad_test(HashSz, CipherState, Version) ->
+ %% TLS 1.1/1.2 ({3,2} and {3,3}) always enforce the padding check
+ {Content, NextIV, Mac} = badpad_content_nextiv_mac(Version),
+ BadCont = badpad_content(Content),
+ {BadCont, Mac, #cipher_state{iv = NextIV}} = ssl_cipher:decipher(?AES_CBC, HashSz, CipherState,
+ badpad_aes_fragment(Version), Version, false),
+ {BadCont, Mac, #cipher_state{iv = NextIV}} = ssl_cipher:decipher(?AES_CBC, HashSz, CipherState,
+ badpad_aes_fragment(Version), Version, true).
+
+aes_fragment({3,N}) when N == 0; N == 1->
+ <<197,9,6,109,242,87,80,154,85,250,110,81,119,95,65,185,53,206,216,153,246,169,
+ 119,177,178,238,248,174,253,220,242,81,33,0,177,251,91,44,247,53,183,198,165,
+ 63,20,194,159,107>>;
+
+aes_fragment(_) ->
+ <<220,193,179,139,171,33,143,245,202,47,123,251,13,232,114,8,
+ 190,162,74,31,186,227,119,155,94,74,119,79,169,193,240,160,
+ 198,181,81,19,98,162,213,228,74,224,253,168,156,59,195,122,
+ 108,101,107,242,20,15,169,150,163,107,101,94,93,104,241,165>>.
+
+badpad_aes_fragment({3,N}) when N == 0; N == 1 ->
+ <<186,139,125,10,118,21,26,248,120,108,193,104,87,118,145,79,225,55,228,10,105,
+ 30,190,37,1,88,139,243,210,99,65,41>>;
+badpad_aes_fragment(_) ->
+ <<137,31,14,77,228,80,76,103,183,125,55,250,68,190,123,131,117,23,229,180,207,
+ 94,121,137,117,157,109,99,113,61,190,138,131,229,201,120,142,179,172,48,77,
+ 234,19,240,33,38,91,93>>.
+
+content_nextiv_mac({3,N}) when N == 0; N == 1 ->
+ {<<"HELLO\n">>,
+ <<72,196,247,97,62,213,222,109,210,204,217,186,172,184, 197,148>>,
+ <<71,136,212,107,223,200,70,232,127,116,148,205,232,35,158,113,237,174,15,217,192,168,35,8,6,107,107,233,25,174,90,111>>};
+content_nextiv_mac(_) ->
+ {<<"HELLO\n">>,
+ <<183,139,16,132,10,209,67,86,168,100,61,217,145,57,36,56>>,
+ <<71,136,212,107,223,200,70,232,127,116,148,205,232,35,158,113,237,174,15,217,192,168,35,8,6,107,107,233,25,174,90,111>>}.
+
+badpad_content_nextiv_mac({3,N}) when N == 0; N == 1 ->
+ {<<"HELLO\n">>,
+ <<225,55,228,10,105,30,190,37,1,88,139,243,210,99,65,41>>,
+ <<183,139,16,132,10,209,67,86,168,100,61,217,145,57,36,56>>
+ };
+badpad_content_nextiv_mac(_) ->
+ {<<"HELLO\n">>,
+ <<133,211,45,189,179,229,56,86,11,178,239,159,14,160,253,140>>,
+ <<183,139,16,132,10,209,67,86,168,100,61,217,145,57,36,56>>
+ }.
+
+badpad_content(Content) ->
+ %% BadContent will fail the MAC check
+ <<16#F0, Content/binary>>.
+
+correct_cipher_state() ->
+ #cipher_state{iv = <<59,201,85,117,188,206,224,136,5,109,46,70,104,79,4,9>>,
+ key = <<72,196,247,97,62,213,222,109,210,204,217,186,172,184,197,148>>}.
+
+incorrect_cipher_state() ->
+ #cipher_state{iv = <<59,201,85,117,188,206,224,136,5,109,46,70,104,79,4,9>>,
+ key = <<72,196,247,97,62,213,222,109,210,204,217,186,172,184,197,254>>}.
+
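The rewritten suite calls ssl_cipher:decipher/6, whose last argument toggles the CBC padding check; pad_test/3 above encodes the expectation that SSL 3.0 never checks padding, TLS 1.0 checks only when the flag is true, and TLS 1.1/1.2 always check. A condensed sketch of a call with the flag (assumes the ?AES_CBC macro from the ssl application's headers, as in the suite; the wrapper name is hypothetical):

    %% PaddingCheck = false relaxes the check only for TLS 1.0 ({3,1});
    %% for {3,2}/{3,3} a bad pad always corrupts the recovered content.
    decipher_with_flag(Version, Fragment, CipherState, HashSz, PaddingCheck) ->
        ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, Fragment,
                            Version, PaddingCheck).
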
diff --git a/lib/ssl/test/ssl_crl_SUITE.erl b/lib/ssl/test/ssl_crl_SUITE.erl
index bad0949ec4..c6bf8898ad 100644
--- a/lib/ssl/test/ssl_crl_SUITE.erl
+++ b/lib/ssl/test/ssl_crl_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -26,43 +26,40 @@
-include_lib("common_test/include/ct.hrl").
-include_lib("public_key/include/public_key.hrl").
--define(TIMEOUT, 120000).
-define(LONG_TIMEOUT, 600000).
--define(SLEEP, 1000).
--define(OPENSSL_RENEGOTIATE, "R\n").
--define(OPENSSL_QUIT, "Q\n").
--define(OPENSSL_GARBAGE, "P\n").
--define(EXPIRE, 10).
%%--------------------------------------------------------------------
%% Common Test interface functions -----------------------------------
%%--------------------------------------------------------------------
-suite() -> [{ct_hooks,[ts_install_cth]}].
+suite() ->
+ [{ct_hooks,[ts_install_cth]}].
all() ->
[
- {group, basic},
- {group, v1_crl},
- {group, idp_crl}
+ {group, check_true},
+ {group, check_peer},
+ {group, check_best_effort}
].
groups() ->
- [{basic, [], basic_tests()},
- {v1_crl, [], v1_crl_tests()},
- {idp_crl, [], idp_crl_tests()}].
+ [
+ {check_true, [], [{group, v2_crl},
+ {group, v1_crl},
+ {group, idp_crl}]},
+ {check_peer, [], [{group, v2_crl},
+ {group, v1_crl},
+ {group, idp_crl}]},
+ {check_best_effort, [], [{group, v2_crl},
+ {group, v1_crl},
+ {group, idp_crl}]},
+ {v2_crl, [], basic_tests()},
+ {v1_crl, [], basic_tests()},
+ {idp_crl, [], basic_tests()}].
basic_tests() ->
[crl_verify_valid, crl_verify_revoked].
-v1_crl_tests() ->
- [crl_verify_valid, crl_verify_revoked].
-
-idp_crl_tests() ->
- [crl_verify_valid, crl_verify_revoked].
-
-%%%================================================================
-%%% Suite init/end
init_per_suite(Config0) ->
Dog = ct:timetrap(?LONG_TIMEOUT *2),
@@ -70,10 +67,7 @@ init_per_suite(Config0) ->
false ->
{skip, "Openssl not found"};
_ ->
- TLSVersion = ?config(tls_version, Config0),
OpenSSL_version = (catch os:cmd("openssl version")),
- ct:log("TLS version: ~p~nOpenSSL version: ~p~n~n~p:module_info(): ~p~n~nssl:module_info(): ~p~n",
- [TLSVersion, OpenSSL_version, ?MODULE, ?MODULE:module_info(), ssl:module_info()]),
case ssl_test_lib:enough_openssl_crl_support(OpenSSL_version) of
false ->
{skip, io_lib:format("Bad openssl version: ~p",[OpenSSL_version])};
@@ -81,7 +75,6 @@ init_per_suite(Config0) ->
catch crypto:stop(),
try crypto:start() of
ok ->
- ssl:start(),
{ok, Hostname0} = inet:gethostname(),
IPfamily =
case lists:member(list_to_atom(Hostname0), ct:get_config(ipv6_hosts,[])) of
@@ -89,8 +82,7 @@ init_per_suite(Config0) ->
false -> inet
end,
[{ipfamily,IPfamily}, {watchdog, Dog}, {openssl_version,OpenSSL_version} | Config0]
- catch _C:_E ->
- ct:log("crypto:start() caught ~p:~p",[_C,_E]),
+ catch _:_ ->
{skip, "Crypto did not start"}
end
end
@@ -100,443 +92,175 @@ end_per_suite(_Config) ->
ssl:stop(),
application:stop(crypto).
-%%%================================================================
-%%% Group init/end
-
-init_per_group(Group, Config) ->
- ssl:start(),
- inets:start(),
- CertDir = filename:join(?config(priv_dir, Config), Group),
- DataDir = ?config(data_dir, Config),
- ServerRoot = make_dir_path([?config(priv_dir,Config), Group, tmp]),
- %% start a HTTP server to serve the CRLs
- {ok, Httpd} = inets:start(httpd, [{ipfamily, ?config(ipfamily,Config)},
- {server_name, "localhost"}, {port, 0},
- {server_root, ServerRoot},
- {document_root, CertDir},
- {modules, [mod_get]}
- ]),
- [{port,Port}] = httpd:info(Httpd, [port]),
- ct:log("~p:~p~nHTTPD IP family=~p, port=~p~n", [?MODULE, ?LINE, ?config(ipfamily,Config), Port]),
- CertOpts = [{crl_port,Port}|cert_opts(Group)],
- Result = make_certs:all(DataDir, CertDir, CertOpts),
- ct:log("~p:~p~nmake_certs:all(~n DataDir=~p,~n CertDir=~p,~n ServerRoot=~p~n Opts=~p~n) returned ~p~n", [?MODULE,?LINE,DataDir, CertDir, ServerRoot, CertOpts, Result]),
- [{make_cert_result, Result}, {cert_dir, CertDir}, {httpd, Httpd} | Config].
-
-cert_opts(v1_crl) -> [{v2_crls, false}];
-cert_opts(idp_crl) -> [{issuing_distribution_point, true}];
-cert_opts(_) -> [].
-
-make_dir_path(PathComponents) ->
- lists:foldl(fun(F,P0) -> file:make_dir(P=filename:join(P0,F)), P end,
- "",
- PathComponents).
-
+init_per_group(check_true, Config) ->
+ [{crl_check, true} | Config];
+init_per_group(check_peer, Config) ->
+ [{crl_check, peer} | Config];
+init_per_group(check_best_effort, Config) ->
+ [{crl_check, best_effort} | Config];
+init_per_group(Group, Config0) ->
+ case is_idp(Group) of
+ true ->
+ [{idp_crl, true} | Config0];
+ false ->
+ DataDir = ?config(data_dir, Config0),
+ CertDir = filename:join(?config(priv_dir, Config0), Group),
+ {CertOpts, Config} = init_certs(CertDir, Group, Config0),
+ Result = make_certs:all(DataDir, CertDir, CertOpts),
+ [{make_cert_result, Result}, {cert_dir, CertDir}, {idp_crl, false} | Config]
+ end.
end_per_group(_GroupName, Config) ->
- case ?config(httpd, Config) of
- undefined -> ok;
- Pid ->
- ct:log("Stop httpd ~p",[Pid]),
- ok = inets:stop(httpd, Pid)
- ,ct:log("Stopped",[])
- end,
- inets:stop(),
+
Config.
+init_per_testcase(Case, Config0) ->
+ case ?config(idp_crl, Config0) of
+ true ->
+ end_per_testcase(Case, Config0),
+ inets:start(),
+ ssl:start(),
+ ServerRoot = make_dir_path([?config(priv_dir, Config0), idp_crl, tmp]),
+ %% start a HTTP server to serve the CRLs
+ {ok, Httpd} = inets:start(httpd, [{ipfamily, ?config(ipfamily, Config0)},
+ {server_name, "localhost"}, {port, 0},
+ {server_root, ServerRoot},
+ {document_root,
+ filename:join(?config(priv_dir, Config0), idp_crl)}
+ ]),
+ [{port,Port}] = httpd:info(Httpd, [port]),
+ Config = [{httpd_port, Port} | Config0],
+ DataDir = ?config(data_dir, Config),
+ CertDir = filename:join(?config(priv_dir, Config0), idp_crl),
+ {CertOpts, Config} = init_certs(CertDir, idp_crl, Config),
+ Result = make_certs:all(DataDir, CertDir, CertOpts),
+ [{make_cert_result, Result}, {cert_dir, CertDir} | Config];
+ false ->
+ end_per_testcase(Case, Config0),
+ ssl:start(),
+ Config0
+ end.
+
+end_per_testcase(_, Config) ->
+ case ?config(idp_crl, Config) of
+ true ->
+ ssl:stop(),
+ inets:stop();
+ false ->
+ ssl:stop()
+ end.
+
%%%================================================================
%%% Test cases
+%%%================================================================
crl_verify_valid() ->
[{doc,"Verify a simple valid CRL chain"}].
crl_verify_valid(Config) when is_list(Config) ->
- process_flag(trap_exit, true),
PrivDir = ?config(cert_dir, Config),
- ServerOpts = [{keyfile, filename:join([PrivDir, "server", "key.pem"])},
- {certfile, filename:join([PrivDir, "server", "cert.pem"])},
- {cacertfile, filename:join([PrivDir, "server", "cacerts.pem"])}],
-
+ Check = ?config(crl_check, Config),
+ ServerOpts = [{keyfile, filename:join([PrivDir, "server", "key.pem"])},
+ {certfile, filename:join([PrivDir, "server", "cert.pem"])},
+ {cacertfile, filename:join([PrivDir, "server", "cacerts.pem"])}],
+ ClientOpts = case ?config(idp_crl, Config) of
+ true ->
+ [{cacertfile, filename:join([PrivDir, "server", "cacerts.pem"])},
+ {crl_check, Check},
+ {crl_cache, {ssl_crl_cache, {internal, [{http, 5000}]}}},
+ {verify, verify_peer}];
+ false ->
+ [{cacertfile, filename:join([PrivDir, "server", "cacerts.pem"])},
+ {crl_check, Check},
+ {verify, verify_peer}]
+ end,
{ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
- Data = "From openssl to erlang",
- Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
- {from, self()},
- {mfa, {?MODULE, erlang_ssl_receive, [Data]}},
- %{mfa, {ssl_test_lib, no_result, []}},
- {options, ServerOpts}]),
- ct:log("~p:~p~nreturn from ssl_test_lib:start_server:~n~p",[?MODULE,?LINE,Server]),
- Port = ssl_test_lib:inet_port(Server),
-
- CACerts = load_cert(filename:join([PrivDir, "erlangCA", "cacerts.pem"])),
-
- ClientOpts = [{cacerts, CACerts},
- {verify, verify_peer},
- {verify_fun, {fun validate_function/3, {CACerts, []}}}],
-
-
- ct:log("~p:~p~ncalling ssl_test_lib:start_client",[?MODULE,?LINE]),
- Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
- {host, Hostname},
- {from, self()},
- {mfa, {?MODULE,
- erlang_ssl_send, [Data]}},
- %{mfa, {ssl_test_lib, no_result, []}},
- {options, ClientOpts}]),
- ct:log("~p:~p~nreturn from ssl_test_lib:start_client:~n~p",[?MODULE,?LINE,Client]),
-
- ssl_test_lib:check_result(Client, ok, Server, ok),
-
- %% Clean close down! Server needs to be closed first !!
- ssl_test_lib:close(Server),
- ssl_test_lib:close(Client),
- process_flag(trap_exit, false).
+ ssl_crl_cache:insert({file, filename:join([PrivDir, "erlangCA", "crl.pem"])}),
+ ssl_crl_cache:insert({file, filename:join([PrivDir, "otpCA", "crl.pem"])}),
+
+ crl_verify_valid(Hostname, ServerNode, ServerOpts, ClientNode, ClientOpts).
crl_verify_revoked() ->
- [{doc,"Verify a simple valid CRL chain"}].
-crl_verify_revoked(Config) when is_list(Config) ->
- process_flag(trap_exit, true),
+ [{doc,"Verify a simple CRL chain when peer cert is reveoked"}].
+crl_verify_revoked(Config) when is_list(Config) ->
PrivDir = ?config(cert_dir, Config),
+ Check = ?config(crl_check, Config),
ServerOpts = [{keyfile, filename:join([PrivDir, "revoked", "key.pem"])},
- {certfile, filename:join([PrivDir, "revoked", "cert.pem"])},
- {cacertfile, filename:join([PrivDir, "revoked", "cacerts.pem"])}],
- ct:log("~p:~p~nserver opts ~p~n", [?MODULE,?LINE, ServerOpts]),
+ {certfile, filename:join([PrivDir, "revoked", "cert.pem"])},
+ {cacertfile, filename:join([PrivDir, "revoked", "cacerts.pem"])}],
{ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
- Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
- {from, self()},
- %{mfa, {?MODULE, erlang_ssl_receive, [Data]}},
- {mfa, {ssl_test_lib, no_result, []}},
- {options, ServerOpts}]),
+ Server = ssl_test_lib:start_server_error([{node, ServerNode}, {port, 0},
+ {from, self()},
+ {options, ServerOpts}]),
Port = ssl_test_lib:inet_port(Server),
+
+ ssl_crl_cache:insert({file, filename:join([PrivDir, "erlangCA", "crl.pem"])}),
+ ssl_crl_cache:insert({file, filename:join([PrivDir, "otpCA", "crl.pem"])}),
+
+ ClientOpts = case ?config(idp_crl, Config) of
+ true ->
+ [{cacertfile, filename:join([PrivDir, "revoked", "cacerts.pem"])},
+ {crl_cache, {ssl_crl_cache, {internal, [{http, 5000}]}}},
+ {crl_check, Check},
+ {verify, verify_peer}];
+ false ->
+ [{cacertfile, filename:join([PrivDir, "revoked", "cacerts.pem"])},
+ {crl_check, Check},
+ {verify, verify_peer}]
+ end,
+
+ Client = ssl_test_lib:start_client_error([{node, ClientNode}, {port, Port},
+ {host, Hostname},
+ {from, self()},
+ {options, ClientOpts}]),
+ receive
+ {Server, AlertOrClose} ->
+ ct:pal("Server Alert or Close ~p", [AlertOrClose])
+ end,
+ ssl_test_lib:check_result(Client, {error, {tls_alert, "certificate revoked"}}).
- CACerts = load_cert(filename:join([PrivDir, "erlangCA", "cacerts.pem"])),
- ClientOpts = [{cacerts, CACerts},
- {verify, verify_peer},
- {verify_fun, {fun validate_function/3, {CACerts, []}}}],
- {connect_failed, _} = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
+crl_verify_valid(Hostname, ServerNode, ServerOpts, ClientNode, ClientOpts) ->
+ Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
+ {from, self()},
+ {mfa, {ssl_test_lib,
+ send_recv_result_active, []}},
+ {options, ServerOpts}]),
+ Port = ssl_test_lib:inet_port(Server),
+ Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
{host, Hostname},
{from, self()},
- %{mfa, {?MODULE,
- %erlang_ssl_receive, [Data]}},
- {mfa, {ssl_test_lib, no_result, []}},
+ {mfa, {ssl_test_lib,
+ send_recv_result_active, []}},
{options, ClientOpts}]),
+
+ ssl_test_lib:check_result(Client, ok, Server, ok),
- %% Clean close down! Server needs to be closed first !!
ssl_test_lib:close(Server),
- process_flag(trap_exit, false).
-
-%%%================================================================
-%%% Lib
-
-erlang_ssl_receive(Socket, Data) ->
- ct:log("~p:~p~nConnection info: ~p~n",
- [?MODULE,?LINE, ssl:connection_info(Socket)]),
- receive
- {ssl, Socket, Data} ->
- ct:log("~p:~p~nReceived ~p~n",[?MODULE,?LINE, Data]),
- %% open_ssl server sometimes hangs waiting in blocking read
- ssl:send(Socket, "Got it"),
- ok;
- {ssl, Socket, Byte} when length(Byte) == 1 ->
- erlang_ssl_receive(Socket, tl(Data));
- {Port, {data,Debug}} when is_port(Port) ->
- ct:log("~p:~p~nopenssl ~s~n",[?MODULE,?LINE, Debug]),
- erlang_ssl_receive(Socket,Data);
- Other ->
- ct:fail({unexpected_message, Other})
- after 4000 ->
- ct:fail({did_not_get, Data})
- end.
-
-
-erlang_ssl_send(Socket, Data) ->
- ct:log("~p:~p~nConnection info: ~p~n",
- [?MODULE,?LINE, ssl:connection_info(Socket)]),
- ssl:send(Socket, Data),
- ok.
-
-load_certs(undefined) ->
- undefined;
-load_certs(CertDir) ->
- case file:list_dir(CertDir) of
- {ok, Certs} ->
- load_certs(lists:map(fun(Cert) -> filename:join(CertDir, Cert)
- end, Certs), []);
- {error, _} ->
- undefined
- end.
-
-load_certs([], Acc) ->
- ct:log("~p:~p~nSuccessfully loaded ~p CA certificates~n", [?MODULE,?LINE, length(Acc)]),
- Acc;
-load_certs([Cert|Certs], Acc) ->
- case filelib:is_dir(Cert) of
- true ->
- load_certs(Certs, Acc);
- _ ->
- %ct:log("~p:~p~nLoading certificate ~p~n", [?MODULE,?LINE, Cert]),
- load_certs(Certs, load_cert(Cert) ++ Acc)
- end.
-
-load_cert(Cert) ->
- {ok, Bin} = file:read_file(Cert),
- case filename:extension(Cert) of
- ".der" ->
- %% no decoding necessary
- [Bin];
- _ ->
- %% assume PEM otherwise
- Contents = public_key:pem_decode(Bin),
- [DER || {Type, DER, Cipher} <- Contents, Type == 'Certificate', Cipher == 'not_encrypted']
- end.
-
-%% @doc Validator function for SSL negotiation.
-%%
-validate_function(Cert, valid_peer, State) ->
- ct:log("~p:~p~nvaliding peer ~p with ~p intermediate certs~n",
- [?MODULE,?LINE, get_common_name(Cert),
- length(element(2, State))]),
- %% peer certificate validated, now check the CRL
- Res = (catch check_crl(Cert, State)),
- ct:log("~p:~p~nCRL validate result for ~p: ~p~n",
- [?MODULE,?LINE, get_common_name(Cert), Res]),
- {Res, State};
-validate_function(Cert, valid, {TrustedCAs, IntermediateCerts}=State) ->
- case public_key:pkix_is_self_signed(Cert) of
- true ->
- ct:log("~p:~p~nroot certificate~n",[?MODULE,?LINE]),
- %% this is a root cert, no CRL
- {valid, {TrustedCAs, [Cert|IntermediateCerts]}};
- false ->
- %% check is valid CA certificate, add to the list of
- %% intermediates
- Res = (catch check_crl(Cert, State)),
- ct:log("~p:~p~nCRL intermediate CA validate result for ~p: ~p~n",
- [?MODULE,?LINE, get_common_name(Cert), Res]),
- {Res, {TrustedCAs, [Cert|IntermediateCerts]}}
- end;
-validate_function(_Cert, _Event, State) ->
- %ct:log("~p:~p~nignoring event ~p~n", [?MODULE,?LINE, _Event]),
- {valid, State}.
+ ssl_test_lib:close(Client).
-%% @doc Given a certificate, find CRL distribution points for the given
-%% certificate, fetch, and attempt to validate each CRL through
-%% issuer_function/4.
-%%
-check_crl(Cert, State) ->
- %% pull the CRL distribution point(s) out of the certificate, if any
- ct:log("~p:~p~ncheck_crl(~n Cert=~p,~nState=~p~n)",[?MODULE,?LINE,Cert,State]),
- case pubkey_cert:select_extension(
- ?'id-ce-cRLDistributionPoints',
- pubkey_cert:extensions_list(Cert#'OTPCertificate'.tbsCertificate#'OTPTBSCertificate'.extensions)) of
- undefined ->
- ct:log("~p:~p~nno CRL distribution points for ~p~n",
- [?MODULE,?LINE, get_common_name(Cert)]),
- %% fail; we can't validate if there's no CRL
- no_crl;
- CRLExtension ->
- ct:log("~p:~p~nCRLExtension=~p)",[?MODULE,?LINE,CRLExtension]),
- CRLDistPoints = CRLExtension#'Extension'.extnValue,
- DPointsAndCRLs = lists:foldl(fun(Point, Acc) ->
- %% try to read the CRL over http or from a
- %% local file
- case fetch_point(Point) of
- not_available ->
- ct:log("~p:~p~nfetch_point returned~n~p~n)",[?MODULE,?LINE,not_available]),
- Acc;
- Res ->
- ct:log("~p:~p~nfetch_point returned~n~p~n)",[?MODULE,?LINE,Res]),
- [{Point, Res} | Acc]
- end
- end, [], CRLDistPoints),
- public_key:pkix_crls_validate(Cert,
- DPointsAndCRLs,
- [{issuer_fun,
- {fun issuer_function/4, State}}])
- end.
-
-%% @doc Given a list of distribution points for CRLs, certificates and
-%% both trusted and intermediary certificates, attempt to build and
-%% authority chain back via build_chain to verify that it is valid.
-%%
-issuer_function(_DP, CRL, _Issuer, {TrustedCAs, IntermediateCerts}) ->
- %% XXX the 'Issuer' we get passed here is the AuthorityKeyIdentifier,
- %% which we are not currently smart enough to understand
- %% Read the CA certs out of the file
- ct:log("~p:~p~nissuer_function(~nCRL=~p,~nLast param=~p)",[?MODULE,?LINE,CRL, {TrustedCAs, IntermediateCerts}]),
- Certs = [public_key:pkix_decode_cert(DER, otp) || DER <- TrustedCAs],
- %% get the real issuer out of the CRL
- Issuer = public_key:pkix_normalize_name(
- pubkey_cert_records:transform(
- CRL#'CertificateList'.tbsCertList#'TBSCertList'.issuer, decode)),
- %% assume certificates are ordered from root to tip
- case find_issuer(Issuer, IntermediateCerts ++ Certs) of
- undefined ->
- ct:log("~p:~p~nunable to find certificate matching CRL issuer ~p~n",
- [?MODULE,?LINE, Issuer]),
- error;
- IssuerCert ->
- ct:log("~p:~p~nIssuerCert=~p~n)",[?MODULE,?LINE,IssuerCert]),
- case build_chain({public_key:pkix_encode('OTPCertificate',
- IssuerCert,
- otp),
- IssuerCert}, IntermediateCerts, Certs, []) of
- undefined ->
- error;
- {OTPCert, Path} ->
- {ok, OTPCert, Path}
- end
- end.
-
-%% @doc Attempt to build authority chain back using intermediary
-%% certificates, falling back on trusted certificates if the
-%% intermediary chain of certificates does not fully extend to the
-%% root.
-%%
-%% Returns: {RootCA :: #OTPCertificate{}, Chain :: [der_encoded()]}
-%%
-build_chain({DER, Cert}, IntCerts, TrustedCerts, Acc) ->
- %% check if this cert is self-signed, if it is, we've reached the
- %% root of the chain
- Issuer = public_key:pkix_normalize_name(
- Cert#'OTPCertificate'.tbsCertificate#'OTPTBSCertificate'.issuer),
- Subject = public_key:pkix_normalize_name(
- Cert#'OTPCertificate'.tbsCertificate#'OTPTBSCertificate'.subject),
- case Issuer == Subject of
- true ->
- case find_issuer(Issuer, TrustedCerts) of
- undefined ->
- ct:log("~p:~p~nself-signed certificate is NOT trusted~n",[?MODULE,?LINE]),
- undefined;
- TrustedCert ->
- %% return the cert from the trusted list, to prevent
- %% issuer spoofing
- {TrustedCert,
- [public_key:pkix_encode(
- 'OTPCertificate', TrustedCert, otp)|Acc]}
- end;
- false ->
- Match = lists:foldl(
- fun(C, undefined) ->
- S = public_key:pkix_normalize_name(C#'OTPCertificate'.tbsCertificate#'OTPTBSCertificate'.subject),
- %% compare the subject to the current issuer
- case Issuer == S of
- true ->
- %% we've found our man
- {public_key:pkix_encode('OTPCertificate', C, otp), C};
- false ->
- undefined
- end;
- (_E, A) ->
- %% already matched
- A
- end, undefined, IntCerts),
- case Match of
- undefined when IntCerts /= TrustedCerts ->
- %% continue the chain by using the trusted CAs
- ct:log("~p:~p~nRan out of intermediate certs, switching to trusted certs~n",[?MODULE,?LINE]),
- build_chain({DER, Cert}, TrustedCerts, TrustedCerts, Acc);
- undefined ->
- ct:log("Can't construct chain of trust beyond ~p~n",
- [?MODULE,?LINE, get_common_name(Cert)]),
- %% can't find the current cert's issuer
- undefined;
- Match ->
- build_chain(Match, IntCerts, TrustedCerts, [DER|Acc])
- end
- end.
-
-%% @doc Given a certificate and a list of trusted or intermediary
-%% certificates, attempt to find a match in the list or bail with
-%% undefined.
-find_issuer(Issuer, Certs) ->
- lists:foldl(
- fun(OTPCert, undefined) ->
- %% check if this certificate matches the issuer
- Normal = public_key:pkix_normalize_name(
- OTPCert#'OTPCertificate'.tbsCertificate#'OTPTBSCertificate'.subject),
- case Normal == Issuer of
- true ->
- OTPCert;
- false ->
- undefined
- end;
- (_E, Acc) ->
- %% already found a match
- Acc
- end, undefined, Certs).
-
-%% @doc Find distribution points for a given CRL and then attempt to
-%% fetch the CRL from the first available.
-fetch_point(#'DistributionPoint'{distributionPoint={fullName, Names}}) ->
- Decoded = [{NameType,
- pubkey_cert_records:transform(Name, decode)}
- || {NameType, Name} <- Names],
- ct:log("~p:~p~ncall fetch(~nDecoded=~p~n)",[?MODULE,?LINE,Decoded]),
- fetch(Decoded).
-
-%% @doc Given a list of locations to retrieve a CRL from, attempt to
-%% retrieve either from a file or http resource and bail as soon as
-%% it can be found.
-%%
-%% Currently, only hand a armored PEM or DER encoded file, with
-%% defaulting to DER.
-%%
-fetch([]) ->
- not_available;
-fetch([{uniformResourceIdentifier, "http"++_=URL}|Rest]) ->
- ct:log("~p:~p~ngetting CRL from ~p~n", [?MODULE,?LINE, URL]),
- case httpc:request(get, {URL, []}, [], [{body_format, binary}]) of
- {ok, {_Status, _Headers, Body}} ->
- case Body of
- <<"-----BEGIN", _/binary>> ->
- ct:log("~p:~p~npublic_key:pem_decode,~nBody=~p~n)",[?MODULE,?LINE,Body]),
- [{'CertificateList',
- DER, _}=CertList] = public_key:pem_decode(Body),
- ct:log("~p:~p~npublic_key:pem_entry_decode,~nCertList=~p~n)",[?MODULE,?LINE,CertList]),
- {DER, public_key:pem_entry_decode(CertList)};
- _ ->
- ct:log("~p:~p~npublic_key:pem_entry_decode,~nBody=~p~n)",[?MODULE,?LINE,{'CertificateList', Body, not_encrypted}]),
- %% assume DER encoded
- try
- public_key:pem_entry_decode({'CertificateList', Body, not_encrypted})
- of
- CertList -> {Body, CertList}
- catch
- _C:_E ->
- ct:log("~p:~p~nfailed DER assumption~nRest=~p", [?MODULE,?LINE,Rest]),
- fetch(Rest)
- end
- end;
- {error, _Reason} ->
- ct:log("~p:~p~nfailed to get CRL ~p~n", [?MODULE,?LINE, _Reason]),
- fetch(Rest);
- Other ->
- ct:log("~p:~p~nreally failed to get CRL ~p~n", [?MODULE,?LINE, Other]),
- fetch(Rest)
- end;
-fetch([Loc|Rest]) ->
- %% unsupported CRL location
- ct:log("~p:~p~nunable to fetch CRL from unsupported location ~p~n",
- [?MODULE,?LINE, Loc]),
- fetch(Rest).
+%%--------------------------------------------------------------------
+%% Internal functions ------------------------------------------------
+%%--------------------------------------------------------------------
+is_idp(idp_crl) ->
+ true;
+is_idp(_) ->
+ false.
+
+init_certs(_,v1_crl, Config) ->
+ {[{v2_crls, false}], Config};
+init_certs(_, idp_crl, Config) ->
+ Port = ?config(httpd_port, Config),
+ {[{crl_port,Port},
+ {issuing_distribution_point, true}], Config
+ };
+init_certs(_,_,Config) ->
+ {[], Config}.
-%% get the common name attribute out of an OTPCertificate record
-get_common_name(OTPCert) ->
- %% You'd think there'd be an easier way than this giant mess, but I
- %% couldn't find one.
- {rdnSequence, Subject} = OTPCert#'OTPCertificate'.tbsCertificate#'OTPTBSCertificate'.subject,
- case [Attribute#'AttributeTypeAndValue'.value || [Attribute] <- Subject,
- Attribute#'AttributeTypeAndValue'.type == ?'id-at-commonName'] of
- [Att] ->
- case Att of
- {teletexString, Str} -> Str;
- {printableString, Str} -> Str;
- {utf8String, Bin} -> binary_to_list(Bin)
- end;
- _ ->
- unknown
- end.
+make_dir_path(PathComponents) ->
+ lists:foldl(fun(F,P0) -> file:make_dir(P=filename:join(P0,F)), P end,
+ "",
+ PathComponents).
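The reworked CRL suite drives the built-in revocation checking through the crl_check option (true | peer | best_effort) and, for the idp_crl group, fetches distribution-point CRLs over HTTP via ssl_crl_cache. A sketch of client options along the lines used above (paths, host and port are placeholders):

    ClientOpts = [{cacertfile, "/path/to/cacerts.pem"},
                  {verify, verify_peer},
                  {crl_check, peer},
                  {crl_cache, {ssl_crl_cache, {internal, [{http, 5000}]}}}],
    {ok, Socket} = ssl:connect("server.example.com", 443, ClientOpts).
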
diff --git a/lib/ssl/test/ssl_pem_cache_SUITE.erl b/lib/ssl/test/ssl_pem_cache_SUITE.erl
new file mode 100644
index 0000000000..23584dfcdf
--- /dev/null
+++ b/lib/ssl/test/ssl_pem_cache_SUITE.erl
@@ -0,0 +1,127 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2015-2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+
+-module(ssl_pem_cache_SUITE).
+
+%% Note: This directive should only be used in test suites.
+-compile(export_all).
+
+-include_lib("common_test/include/ct.hrl").
+-include_lib("kernel/include/file.hrl").
+
+-define(CLEANUP_INTERVAL, 5000).
+
+%%--------------------------------------------------------------------
+%% Common Test interface functions -----------------------------------
+%%--------------------------------------------------------------------
+all() ->
+ [pem_cleanup].
+
+groups() ->
+ [].
+
+init_per_suite(Config0) ->
+ catch crypto:stop(),
+ try crypto:start() of
+ ok ->
+ ssl:start(),
+ %% make RSA certs using openssl
+ Result =
+ (catch make_certs:all(?config(data_dir, Config0),
+ ?config(priv_dir, Config0))),
+ ct:log("Make certs ~p~n", [Result]),
+
+ Config1 = ssl_test_lib:make_dsa_cert(Config0),
+ ssl_test_lib:cert_options(Config1)
+ catch _:_ ->
+ {skip, "Crypto did not start"}
+ end.
+
+end_per_suite(_Config) ->
+ application:stop(crypto).
+
+init_per_group(_GroupName, Config) ->
+ Config.
+
+end_per_group(_GroupName, Config) ->
+ Config.
+
+init_per_testcase(pem_cleanup, Config) ->
+ ssl:stop(),
+ application:load(ssl),
+ application:set_env(ssl, ssl_pem_cache_clean, ?CLEANUP_INTERVAL),
+ ssl:start(),
+ Config.
+
+end_per_testcase(_TestCase, Config) ->
+ %%ssl:stop(),
+ Config.
+
+%%--------------------------------------------------------------------
+%% Test Cases --------------------------------------------------------
+%%--------------------------------------------------------------------
+pem_cleanup() ->
+ [{doc, "Test pem cache invalidate mechanism"}].
+pem_cleanup(Config)when is_list(Config) ->
+ process_flag(trap_exit, true),
+ ClientOpts = ?config(client_opts, Config),
+ ServerOpts = ?config(server_opts, Config),
+ {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
+
+ Server =
+ ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
+ {from, self()},
+ {mfa, {ssl_test_lib, no_result, []}},
+ {options, ServerOpts}]),
+ Port = ssl_test_lib:inet_port(Server),
+ Client =
+ ssl_test_lib:start_client([{node, ClientNode},
+ {port, Port}, {host, Hostname},
+ {mfa, {ssl_test_lib, no_result, []}},
+ {from, self()}, {options, ClientOpts}]),
+
+ Size = ssl_pkix_db:db_size(get_pem_cache()),
+ Certfile = proplists:get_value(certfile, ServerOpts),
+ {ok, FileInfo} = file:read_file_info(Certfile),
+ Time = later(),
+ ok = file:write_file_info(Certfile, FileInfo#file_info{mtime = Time}),
+ ct:sleep(2 * ?CLEANUP_INTERVAL),
+ Size1 = ssl_pkix_db:db_size(get_pem_cache()),
+ ssl_test_lib:close(Server),
+ ssl_test_lib:close(Client),
+ false = Size == Size1.
+
+get_pem_cache() ->
+ {status, _, _, StatusInfo} = sys:get_status(whereis(ssl_manager)),
+ [_, _,_, _, Prop] = StatusInfo,
+ State = ssl_test_lib:state(Prop),
+ case element(6, State) of
+ [_CertDb, _FileRefDb, PemCache| _] ->
+ PemCache;
+ _ ->
+ undefined
+ end.
+
+later() ->
+ DateTime = calendar:now_to_local_time(os:timestamp()),
+ Gregorian = calendar:datetime_to_gregorian_seconds(DateTime),
+ calendar:gregorian_seconds_to_datetime(Gregorian + (2 * ?CLEANUP_INTERVAL)).
+
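pem_cleanup/1 relies on the ssl_pem_cache_clean application environment variable to shorten the PEM cache validation interval; the suite sets it before ssl is (re)started, roughly as follows:

    %% Shorten the PEM cache cleanup interval to 5 seconds for the test.
    ssl:stop(),
    application:load(ssl),
    application:set_env(ssl, ssl_pem_cache_clean, 5000),
    ssl:start().
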
diff --git a/lib/ssl/test/ssl_session_cache_SUITE.erl b/lib/ssl/test/ssl_session_cache_SUITE.erl
index 06a41f1260..36d086338e 100644
--- a/lib/ssl/test/ssl_session_cache_SUITE.erl
+++ b/lib/ssl/test/ssl_session_cache_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
diff --git a/lib/ssl/test/ssl_test_lib.erl b/lib/ssl/test/ssl_test_lib.erl
index d2e6e41482..7d0546210c 100644
--- a/lib/ssl/test/ssl_test_lib.erl
+++ b/lib/ssl/test/ssl_test_lib.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -187,6 +187,7 @@ run_client(Opts) ->
Transport = proplists:get_value(transport, Opts, ssl),
Options = proplists:get_value(options, Opts),
ct:log("~p:~p~n~p:connect(~p, ~p)@~p~n", [?MODULE,?LINE, Transport, Host, Port, Node]),
+ ct:log("SSLOpts: ~p", [Options]),
case rpc:call(Node, Transport, connect, [Host, Port, Options]) of
{ok, Socket} ->
Pid ! {connected, Socket},
@@ -253,7 +254,6 @@ check_result(Server, ServerMsg, Client, ClientMsg) ->
{Port, {data,Debug}} when is_port(Port) ->
ct:log("~p:~p~nopenssl ~s~n",[?MODULE,?LINE, Debug]),
check_result(Server, ServerMsg, Client, ClientMsg);
-
Unexpected ->
Reason = {{expected, {Client, ClientMsg}},
{expected, {Server, ServerMsg}}, {got, Unexpected}},
@@ -267,6 +267,9 @@ check_result(Pid, Msg) ->
{Port, {data,Debug}} when is_port(Port) ->
ct:log("~p:~p~nopenssl ~s~n",[?MODULE,?LINE, Debug]),
check_result(Pid,Msg);
+ %% {Port, {exit_status, Status}} when is_port(Port) ->
+ %% ct:log("~p:~p Exit status: ~p~n",[?MODULE,?LINE, Status]),
+ %% check_result(Pid, Msg);
Unexpected ->
Reason = {{expected, {Pid, Msg}},
{got, Unexpected}},
@@ -836,7 +839,7 @@ string_regex_filter(Str, Search) when is_list(Str) ->
_ ->
true
end;
-string_regex_filter(Str, _Search) ->
+string_regex_filter(_Str, _Search) ->
false.
anonymous_suites() ->
@@ -918,6 +921,10 @@ srp_dss_suites() ->
{srp_dss, aes_256_cbc, sha}],
ssl_cipher:filter_suites(Suites).
+rc4_suites(Version) ->
+ Suites = ssl_cipher:rc4_suites(Version),
+ ssl_cipher:filter_suites(Suites).
+
pem_to_der(File) ->
{ok, PemBin} = file:read_file(File),
public_key:pem_decode(PemBin).
@@ -1125,7 +1132,8 @@ filter_suites(Ciphers0) ->
Supported0 = ssl_cipher:suites(Version)
++ ssl_cipher:anonymous_suites(Version)
++ ssl_cipher:psk_suites(Version)
- ++ ssl_cipher:srp_suites(),
+ ++ ssl_cipher:srp_suites()
+ ++ ssl_cipher:rc4_suites(Version),
Supported1 = ssl_cipher:filter_suites(Supported0),
Supported2 = [ssl:suite_definition(S) || S <- Supported1],
[Cipher || Cipher <- Ciphers0, lists:member(Cipher, Supported2)].
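ssl_test_lib:rc4_suites/1 narrows ssl_cipher:rc4_suites/1 to what the local crypto library can actually run, mirroring how the new rc4_*_cipher_suites test cases pick their suites (sketch):

    %% Pick the RC4 suites for the highest locally supported protocol version.
    NVersion = tls_record:highest_protocol_version([]),
    Version  = tls_record:protocol_version(NVersion),
    Ciphers  = ssl_test_lib:rc4_suites(NVersion),
    ct:log("RC4 suites for ~p: ~p", [Version, Ciphers]).
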
diff --git a/lib/ssl/test/ssl_upgrade_SUITE.erl b/lib/ssl/test/ssl_upgrade_SUITE.erl
new file mode 100644
index 0000000000..c83fb367dc
--- /dev/null
+++ b/lib/ssl/test/ssl_upgrade_SUITE.erl
@@ -0,0 +1,164 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2014-2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(ssl_upgrade_SUITE).
+
+%% Note: This directive should only be used in test suites.
+-compile(export_all).
+
+-include_lib("common_test/include/ct.hrl").
+
+-record(state, {
+ config,
+ server,
+ client,
+ soft
+ }).
+
+all() ->
+ [
+ minor_upgrade,
+ major_upgrade
+ ].
+
+init_per_suite(Config0) ->
+ catch crypto:stop(),
+ try {crypto:start(), erlang:system_info({wordsize, internal}) == erlang:system_info({wordsize, external})} of
+ {ok, true} ->
+ case ct_release_test:init(Config0) of
+ {skip, Reason} ->
+ {skip, Reason};
+ Config ->
+ Result =
+ (catch make_certs:all(?config(data_dir, Config),
+ ?config(priv_dir, Config))),
+ ct:log("Make certs ~p~n", [Result]),
+ ssl_test_lib:cert_options(Config)
+ end;
+ {ok, false} ->
+	    {skip, "Test server cannot handle the halfword emulator correctly. Skipping, since the halfword emulator is deprecated"}
+ catch _:_ ->
+ {skip, "Crypto did not start"}
+ end.
+
+end_per_suite(Config) ->
+ ct_release_test:cleanup(Config),
+ crypto:stop().
+
+init_per_testcase(_TestCase, Config) ->
+ Config.
+end_per_testcase(_TestCase, Config) ->
+ Config.
+
+major_upgrade(Config) when is_list(Config) ->
+ ct_release_test:upgrade(ssl, major,{?MODULE, #state{config = Config}}, Config).
+
+minor_upgrade(Config) when is_list(Config) ->
+ ct_release_test:upgrade(ssl, minor,{?MODULE, #state{config = Config}}, Config).
+
+upgrade_init(CTData, #state{config = Config} = State) ->
+ {ok, {_, _, Up, _Down}} = ct_release_test:get_appup(CTData, ssl),
+ ct:pal("Up: ~p", [Up]),
+    Soft = is_soft(Up), %% Symmetric: if the upgrade is soft, so is the downgrade
+ case Soft of
+ true ->
+ {Server, Client} = soft_start_connection(Config),
+ State#state{server = Server, client = Client,
+ soft = Soft};
+ false ->
+ State#state{soft = Soft}
+ end.
+
+upgrade_upgraded(_, #state{soft = false, config = Config} = State) ->
+ {Server, Client} = restart_start_connection(Config),
+ ssl_test_lib:check_result(Server, ok, Client, ok),
+ ssl_test_lib:close(Server),
+ ssl_test_lib:close(Client),
+ State;
+
+upgrade_upgraded(_, #state{server = Server0, client = Client0,
+ config = Config, soft = true} = State) ->
+ Server0 ! changed_version,
+ Client0 ! changed_version,
+ ssl_test_lib:check_result(Server0, ok, Client0, ok),
+ ssl_test_lib:close(Server0),
+ ssl_test_lib:close(Client0),
+ {Server, Client} = soft_start_connection(Config),
+ State#state{server = Server, client = Client}.
+
+upgrade_downgraded(_, #state{soft = false, config = Config} = State) ->
+ {Server, Client} = restart_start_connection(Config),
+ ssl_test_lib:check_result(Server, ok, Client, ok),
+ ssl_test_lib:close(Server),
+ ssl_test_lib:close(Client),
+ State;
+
+upgrade_downgraded(_, #state{server = Server, client = Client, soft = true} = State) ->
+ Server ! changed_version,
+ Client ! changed_version,
+ ssl_test_lib:check_result(Server, ok, Client, ok),
+ ssl_test_lib:close(Server),
+ ssl_test_lib:close(Client),
+ State.
+
+use_connection(Socket) ->
+ ssl_test_lib:send_recv_result_active(Socket),
+ receive
+ changed_version ->
+ ssl_test_lib:send_recv_result_active(Socket)
+ end.
+
+soft_start_connection(Config) ->
+ ClientOpts = ?config(client_verification_opts, Config),
+ ServerOpts = ?config(server_verification_opts, Config),
+ {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
+ Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
+ {from, self()},
+ {mfa, {?MODULE, use_connection, []}},
+ {options, ServerOpts}]),
+
+ Port = ssl_test_lib:inet_port(Server),
+ Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
+ {host, Hostname},
+ {from, self()},
+ {mfa, {?MODULE, use_connection, []}},
+ {options, ClientOpts}]),
+ {Server, Client}.
+
+restart_start_connection(Config) ->
+ ClientOpts = ?config(client_verification_opts, Config),
+ ServerOpts = ?config(server_verification_opts, Config),
+ {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
+ Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
+ {from, self()},
+ {mfa, {ssl_test_lib, send_recv_result_active, []}},
+ {options, ServerOpts}]),
+
+ Port = ssl_test_lib:inet_port(Server),
+ Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
+ {host, Hostname},
+ {from, self()},
+ {mfa, {ssl_test_lib, send_recv_result_active, []}},
+ {options, ClientOpts}]),
+ {Server, Client}.
+
+is_soft([{restart_application, ssl}]) ->
+ false;
+is_soft(_) ->
+ true.
+
diff --git a/lib/ssl/vsn.mk b/lib/ssl/vsn.mk
index bda974da0e..171147adf2 100644
--- a/lib/ssl/vsn.mk
+++ b/lib/ssl/vsn.mk
@@ -1 +1 @@
-SSL_VSN = 5.3.8
+SSL_VSN = 7.0
diff --git a/lib/stdlib/doc/src/io.xml b/lib/stdlib/doc/src/io.xml
index a28180b42a..8ebfdb2e7f 100644
--- a/lib/stdlib/doc/src/io.xml
+++ b/lib/stdlib/doc/src/io.xml
@@ -505,7 +505,8 @@ ok
<p>Writes the data with standard syntax in the same way as
<c>~w</c>, but breaks terms whose printed representation
is longer than one line into many lines and indents each
- line sensibly. It also tries to detect lists of
+ line sensibly. Left justification is not supported.
+ It also tries to detect lists of
printable characters and to output these as strings. The
Unicode translation modifier is used for determining
what characters are printable. For example:</p>
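For illustration (an editor's note, not part of the patch), the difference between ~w and ~p on a list of printable characters; how ~p breaks long terms across lines depends on the available width:

    1> io:format("~w~n", [[65,66,67]]).
    [65,66,67]
    ok
    2> io:format("~p~n", [[65,66,67]]).
    "ABC"
    ok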
diff --git a/lib/stdlib/doc/src/io_lib.xml b/lib/stdlib/doc/src/io_lib.xml
index 3312b08064..2117d66381 100644
--- a/lib/stdlib/doc/src/io_lib.xml
+++ b/lib/stdlib/doc/src/io_lib.xml
@@ -4,7 +4,7 @@
<erlref>
<header>
<copyright>
- <year>1996</year><year>2013</year>
+ <year>1996</year><year>2014</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -59,6 +59,35 @@
<datatype>
<name name="latin1_string"/>
</datatype>
+ <datatype>
+ <name name="format_spec"/>
+ <desc><p>Description:</p>
+ <list type="bulleted">
+ <item><p><c>control_char</c> is the type of control
+ sequence: <c>$P</c>, <c>$w</c>, and so on;</p>
+ </item>
+ <item><p><c>args</c> is a list of the arguments used by the
+ control sequence, or an empty list if the control sequence
+ does not take any arguments;</p>
+ </item>
+ <item><p><c>width</c> is the field width;</p>
+ </item>
+ <item><p><c>adjust</c> is the adjustment;</p>
+ </item>
+ <item><p><c>precision</c> is the precision of the printed
+ argument;</p>
+ </item>
+ <item><p><c>pad_char</c> is the padding character;</p>
+ </item>
+      <item><p><c>encoding</c> is <c>unicode</c> if the translation
+       modifier <c>t</c> is present, otherwise <c>latin1</c>;</p>
+      </item>
+      <item><p><c>strings</c> is <c>false</c> if the modifier
+       <c>l</c> is present, otherwise <c>true</c>.</p>
+      </item>
+ </list>
+ </desc>
+ </datatype>
</datatypes>
<funcs>
<func>
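As a sketch of the map described above (derived from the field list; not part of the patch), scanning a single ~ts control sequence yields one format_spec() with the default width, adjustment, precision and pad character:

    [#{control_char := $s, args := ["hello"], width := none,
       adjust := right, precision := none, pad_char := $\s,
       encoding := unicode, strings := true}] =
        io_lib:scan_format("~ts", ["hello"]).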
@@ -260,6 +289,45 @@
</desc>
</func>
<func>
+ <name name="scan_format" arity="2"/>
+ <fsummary>Parse all control sequences in the format string</fsummary>
+ <desc>
+ <p>Returns a list corresponding to the given format string,
+ where control sequences have been replaced with
+ corresponding tuples. This list can be passed to <seealso
+ marker="#build_text/1">io_lib:build_text/1</seealso> to have
+ the same effect as <c>io_lib:format(Format, Args)</c>, or to
+ <seealso
+ marker="#unscan_format/1">io_lib:unscan_format/1</seealso>
+ in order to get the corresponding pair of <c>Format</c> and
+ <c>Args</c> (with every <c>*</c> and corresponding argument
+ expanded to numeric values).</p>
+ <p>A typical use of this function is to replace unbounded-size
+ control sequences like <c>~w</c> and <c>~p</c> with the
+ depth-limited variants <c>~W</c> and <c>~P</c> before
+ formatting to text, e.g. in a logger.</p>
+ </desc>
+ </func>
+ <func>
+ <name name="unscan_format" arity="1"/>
+ <fsummary>Revert a pre-parsed format list to a plain character list
+ and a list of arguments</fsummary>
+ <desc>
+ <p>See <seealso
+ marker="#scan_format/2">io_lib:scan_format/2</seealso> for
+ details.</p>
+ </desc>
+ </func>
+ <func>
+ <name name="build_text" arity="1"/>
+ <fsummary>Build the output text for a pre-parsed format list</fsummary>
+ <desc>
+ <p>See <seealso
+ marker="#scan_format/2">io_lib:scan_format/2</seealso> for
+ details.</p>
+ </desc>
+ </func>
+ <func>
<name name="indentation" arity="2"/>
<fsummary>Indentation after printing string</fsummary>
<desc>
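A minimal sketch of the logger use case mentioned above (the helper names format_limited/3 and limit_spec/2 are illustrative, not part of the patch): ~p and ~w specs are rewritten to ~P and ~W with an extra depth argument before the text is built.

    format_limited(Format, Args, Depth) ->
        Limited = [limit_spec(S, Depth) || S <- io_lib:scan_format(Format, Args)],
        io_lib:build_text(Limited).

    %% Plain characters in the scanned list fall through unchanged.
    limit_spec(#{control_char := C, args := [Arg]} = Spec, Depth)
      when C =:= $p; C =:= $w ->
        Spec#{control_char := C - ($a - $A), args := [Arg, Depth]};
    limit_spec(Other, _Depth) ->
        Other.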
diff --git a/lib/stdlib/doc/src/math.xml b/lib/stdlib/doc/src/math.xml
index 43cd20e726..7cfc8a1175 100644
--- a/lib/stdlib/doc/src/math.xml
+++ b/lib/stdlib/doc/src/math.xml
@@ -67,6 +67,7 @@
<name name="atanh" arity="1"/>
<name name="exp" arity="1"/>
<name name="log" arity="1"/>
+ <name name="log2" arity="1"/>
<name name="log10" arity="1"/>
<name name="pow" arity="2"/>
<name name="sqrt" arity="1"/>
diff --git a/lib/stdlib/doc/src/re.xml b/lib/stdlib/doc/src/re.xml
index a1833f6a51..5af1468e9b 100644
--- a/lib/stdlib/doc/src/re.xml
+++ b/lib/stdlib/doc/src/re.xml
@@ -150,7 +150,11 @@ This option makes it possible to include comments inside complicated patterns. N
<tag><c>no_start_optimize</c></tag>
<item>This option disables optimization that may malfunction if "Special start-of-pattern items" are present in the regular expression. A typical example would be when matching "DEFABC" against "(*COMMIT)ABC", where the start optimization of PCRE would skip the subject up to the "A" and would never realize that the (*COMMIT) instruction should have made the matching fail. This option is only relevant if you use "start-of-pattern items", as discussed in the section "PCRE regular expression details" below.</item>
<tag><c>ucp</c></tag>
- <item>Specifies that Unicode Character Properties should be used when resolving \B, \b, \D, \d, \S, \s, \Wand \w. Without this flag, only ISO-Latin-1 properties are used. Using Unicode properties hurts performance, but is semantically correct when working with Unicode characters beyond the ISO-Latin-1 range.</item>
+ <item>Specifies that Unicode Character Properties should be used when
+ resolving \B, \b, \D, \d, \S, \s, \W and \w. Without this flag, only
+ ISO-Latin-1 properties are used. Using Unicode properties hurts
+ performance, but is semantically correct when working with Unicode
+ characters beyond the ISO-Latin-1 range.</item>
<tag><c>never_utf</c></tag>
<item>Specifies that the (*UTF) and/or (*UTF8) "start-of-pattern items" are forbidden. This flag can not be combined with <c>unicode</c>. Useful if ISO-Latin-1 patterns from an external source are to be compiled.</item>
</taglist>
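A rough illustration of the ucp option (not part of the patch); the Cyrillic letters below lie outside the ISO-Latin-1 range, so \w only matches them when Unicode character properties are enabled:

    nomatch    = re:run(<<"где"/utf8>>, "\\w+", [unicode]),
    {match, _} = re:run(<<"где"/utf8>>, "\\w+", [unicode, ucp]).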
@@ -966,7 +970,7 @@ appearance causes an error.
</quote>
<p>This has the same effect as setting the <c>ucp</c> option: it causes sequences
such as \d and \w to use Unicode properties to determine character types,
-instead of recognizing only characters with codes less than 128 via a lookup
+instead of recognizing only characters with codes less than 256 via a lookup
table.
</p>
@@ -1307,7 +1311,8 @@ By default, the definition of letters and digits is controlled by PCRE's
low-valued character tables, in Erlang's case (and without the <c>unicode</c> option),
the ISO-Latin-1 character set.</p>
-<p>By default, in <c>unicode</c> mode, characters with values greater than 128 never match
+<p>By default, in <c>unicode</c> mode, characters with values greater than 255,
+i.e. all characters outside the ISO-Latin-1 character set, never match
\d, \s, or \w, and always match \D, \S, and \W. These sequences retain
their original meanings from before UTF support was available, mainly for
efficiency reasons. However, if the <c>ucp</c> option is set, the behaviour is changed so that Unicode
@@ -1954,10 +1959,10 @@ can be included in a class as a literal string of data units, or by using the
upper case and lower case versions, so for example, a caseless [aeiou] matches
"A" as well as "a", and a caseless [^aeiou] does not match "A", whereas a
caseful version would. In a UTF mode, PCRE always understands the concept of
-case for characters whose values are less than 128, so caseless matching is
+case for characters whose values are less than 256, so caseless matching is
always possible. For characters with higher values, the concept of case is
supported if PCRE is compiled with Unicode property support, but not otherwise.
-If you want to use caseless matching in a UTF mode for characters 128 and
+If you want to use caseless matching in a UTF mode for characters 256 and
above, you must ensure that PCRE is compiled with Unicode property support as
well as with UTF support.</p>
@@ -1989,7 +1994,7 @@ matches the letters in either case. For example, [W-c] is equivalent to
[][\\^_`wxyzabc], matched caselessly, and in a non-UTF mode, if character
tables for a French locale are in use, [\xc8-\xcb] matches accented E
characters in both cases. In UTF modes, PCRE supports the concept of case for
-characters with values greater than 128 only when it is compiled with Unicode
+characters with values greater than 255 only when it is compiled with Unicode
property support.</p>
<p>The character escape sequences \d, \D, \h, \H, \p, \P, \s, \S, \v,
@@ -2062,7 +2067,7 @@ by a ^ character after the colon. For example,</p>
syntax [.ch.] and [=ch=] where "ch" is a "collating element", but these are not
supported, and an error is given if they are encountered.</p>
-<p>By default, in UTF modes, characters with values greater than 128 do not match
+<p>By default, in UTF modes, characters with values greater than 255 do not match
any of the POSIX character classes. However, if the PCRE_UCP option is passed
to <b>pcre_compile()</b>, some of the classes are changed so that Unicode
character properties are used. This is achieved by replacing the POSIX classes
@@ -2081,7 +2086,7 @@ by other sequences, as follows:</p>
<p>Negated versions, such as [:^alpha:] use \P instead of \p. The other POSIX
classes are unchanged, and match only characters with code points less than
-128.</p>
+256.</p>
</section>
diff --git a/lib/stdlib/src/c.erl b/lib/stdlib/src/c.erl
index c2256c0cf9..9860adf04d 100644
--- a/lib/stdlib/src/c.erl
+++ b/lib/stdlib/src/c.erl
@@ -509,9 +509,12 @@ m(M) ->
{exports,E} = lists:keyfind(exports, 1, L),
Time = get_compile_time(L),
COpts = get_compile_options(L),
- format("Module ~w compiled: ",[M]), print_time(Time),
- format("Compiler options: ~p~n", [COpts]),
+ format("Module: ~w~n", [M]),
+ print_md5(L),
+ format("Compiled: "),
+ print_time(Time),
print_object_file(M),
+ format("Compiler options: ~p~n", [COpts]),
format("Exports: ~n",[]), print_exports(keysort(1, E)).
print_object_file(Mod) ->
@@ -522,6 +525,12 @@ print_object_file(Mod) ->
ignore
end.
+print_md5(L) ->
+ case lists:keyfind(md5, 1, L) of
+ {md5,<<MD5:128>>} -> io:format("MD5: ~.16b~n",[MD5]);
+ _ -> ok
+ end.
+
get_compile_time(L) ->
case get_compile_info(L, time) of
{ok,Val} -> Val;
@@ -569,8 +578,8 @@ split_print_exports([{F1, A1}|T1], [{F2, A2} | T2]) ->
split_print_exports([], []) -> ok.
print_time({Year,Month,Day,Hour,Min,_Secs}) ->
- format("Date: ~s ~w ~w, ", [month(Month),Day,Year]),
- format("Time: ~.2.0w.~.2.0w~n", [Hour,Min]);
+ format("~s ~w ~w, ", [month(Month),Day,Year]),
+ format("~.2.0w:~.2.0w~n", [Hour,Min]);
print_time(notime) ->
format("No compile time info available~n",[]).
diff --git a/lib/stdlib/src/dict.erl b/lib/stdlib/src/dict.erl
index cf8fb3114a..5a9f63c5e2 100644
--- a/lib/stdlib/src/dict.erl
+++ b/lib/stdlib/src/dict.erl
@@ -417,6 +417,8 @@ on_bucket(F, T, Slot) ->
%% could have implemented map and filter using fold but these are
%% faster. We hope!
+fold_dict(F, Acc, #dict{size=0}) when is_function(F, 3) ->
+ Acc;
fold_dict(F, Acc, D) ->
Segs = D#dict.segs,
fold_segs(F, Acc, Segs, tuple_size(Segs)).
@@ -434,6 +436,8 @@ fold_bucket(F, Acc, [?kv(Key,Val)|Bkt]) ->
fold_bucket(F, F(Key, Val, Acc), Bkt);
fold_bucket(F, Acc, []) when is_function(F, 3) -> Acc.
+map_dict(F, #dict{size=0} = Dict) when is_function(F, 2) ->
+ Dict;
map_dict(F, D) ->
Segs0 = tuple_to_list(D#dict.segs),
Segs1 = map_seg_list(F, Segs0),
@@ -453,6 +457,8 @@ map_bucket(F, [?kv(Key,Val)|Bkt]) ->
[?kv(Key,F(Key, Val))|map_bucket(F, Bkt)];
map_bucket(F, []) when is_function(F, 2) -> [].
+filter_dict(F, #dict{size=0} = Dict) when is_function(F, 2) ->
+ Dict;
filter_dict(F, D) ->
Segs0 = tuple_to_list(D#dict.segs),
{Segs1,Fc} = filter_seg_list(F, Segs0, [], 0),
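The added clauses only short-circuit the empty case; observable results are unchanged (a quick sketch, not part of the patch):

    Empty = dict:new(),
    Empty = dict:map(fun(_K, V) -> V end, Empty),
    Empty = dict:filter(fun(_K, _V) -> true end, Empty),
    acc   = dict:fold(fun(_K, _V, Acc) -> Acc end, acc, Empty).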
diff --git a/lib/stdlib/src/erl_lint.erl b/lib/stdlib/src/erl_lint.erl
index 26d8454731..cbe6eeec3c 100644
--- a/lib/stdlib/src/erl_lint.erl
+++ b/lib/stdlib/src/erl_lint.erl
@@ -744,6 +744,8 @@ attribute_state(Form, St) ->
%% State'
%% Allow for record, type and opaque type definitions and spec
%%  declarations to be interspersed within function definitions.
+%% Dialyzer attributes are also allowed everywhere, but are not
+%% checked at all.
function_state({attribute,L,record,{Name,Fields}}, St) ->
record_def(L, Name, Fields, St);
@@ -753,6 +755,8 @@ function_state({attribute,L,opaque,{TypeName,TypeDef,Args}}, St) ->
type_def(opaque, L, TypeName, TypeDef, Args, St);
function_state({attribute,L,spec,{Fun,Types}}, St) ->
spec_decl(L, Fun, Types, St);
+function_state({attribute,_L,dialyzer,_Val}, St) ->
+ St;
function_state({attribute,La,Attr,_Val}, St) ->
add_error(La, {attribute,Attr}, St);
function_state({function,L,N,A,Cs}, St) ->
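A minimal sketch of what this permits (module name is illustrative, not part of the patch): a -dialyzer attribute placed after function definitions is no longer reported by erl_lint as an attribute after function definitions; its value is left for Dialyzer itself to interpret.

    -module(sample).
    -export([f/0]).

    f() -> ok.

    -dialyzer({nowarn_function, f/0}).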
@@ -2266,11 +2270,10 @@ expr({remote,Line,_M,_F}, _Vt, St) ->
%% {UsedVarTable,State}
expr_list(Es, Vt, St) ->
- {Vt1,St1} = foldl(fun (E, {Esvt,St0}) ->
- {Evt,St1} = expr(E, Vt, St0),
- {vtmerge_pat(Evt, Esvt),St1}
- end, {[],St}, Es),
- {vtmerge(vtnew(Vt1, Vt), vtold(Vt1, Vt)),St1}.
+ foldl(fun (E, {Esvt,St0}) ->
+ {Evt,St1} = expr(E, Vt, St0),
+ {vtmerge_pat(Evt, Esvt),St1}
+ end, {[],St}, Es).
record_expr(Line, Rec, Vt, St0) ->
St1 = warn_invalid_record(Line, Rec, St0),
@@ -2288,8 +2291,8 @@ map_fields([{Tag,_,K,V}|Fs], Vt, St, F) when Tag =:= map_field_assoc;
{Pvt,St2} = F([K,V], Vt, St),
{Vts,St3} = map_fields(Fs, Vt, St2, F),
{vtupdate(Pvt, Vts),St3};
-map_fields([], Vt, St, _) ->
- {Vt,St}.
+map_fields([], _, St, _) ->
+ {[],St}.
%% warn_invalid_record(Line, Record, State0) -> State
%% Adds warning if the record is invalid.
diff --git a/lib/stdlib/src/ets.erl b/lib/stdlib/src/ets.erl
index 26b0393b35..09c8924650 100644
--- a/lib/stdlib/src/ets.erl
+++ b/lib/stdlib/src/ets.erl
@@ -1625,13 +1625,18 @@ choice(Height, Width, P, Mode, Tab, Key, Turn, Opos) ->
end.
get_line(P, Default) ->
- case io:get_line(P) of
+ case line_string(io:get_line(P)) of
"\n" ->
Default;
L ->
L
end.
+%% If the standard input is set to binary mode
+%% convert it to a list so we can properly match.
+line_string(Binary) when is_binary(Binary) -> unicode:characters_to_list(Binary);
+line_string(Other) -> Other.
+
nonl(S) -> string:strip(S, right, $\n).
print_number(Tab, Key, Num) ->
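Sketch of the conversion (not part of the patch): when standard_io is in binary mode, io:get_line/1 returns a binary, which line_string/1 normalizes so the "\n" match above still works:

    "y\n" = line_string(<<"y\n">>),
    "y\n" = line_string("y\n").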
diff --git a/lib/stdlib/src/io_lib.erl b/lib/stdlib/src/io_lib.erl
index adc9a0cf5f..e90cda0533 100644
--- a/lib/stdlib/src/io_lib.erl
+++ b/lib/stdlib/src/io_lib.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2013. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2014. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -60,6 +60,7 @@
-module(io_lib).
-export([fwrite/2,fread/2,fread/3,format/2]).
+-export([scan_format/2,unscan_format/1,build_text/1]).
-export([print/1,print/4,indentation/2]).
-export([write/1,write/2,write/3,nl/0,format_prompt/1,format_prompt/2]).
@@ -83,7 +84,7 @@
deep_unicode_char_list/1]).
-export_type([chars/0, latin1_string/0, continuation/0,
- fread_error/0, fread_item/0]).
+ fread_error/0, fread_item/0, format_spec/0]).
%%----------------------------------------------------------------------
@@ -108,6 +109,18 @@
-type fread_item() :: string() | atom() | integer() | float().
+-type format_spec() ::
+ #{
+ control_char => char(),
+ args => [any()],
+ width => 'none' | integer(),
+ adjust => 'left' | 'right',
+ precision => 'none' | integer(),
+ pad_char => char(),
+ encoding => 'unicode' | 'latin1',
+ strings => boolean()
+ }.
+
%%----------------------------------------------------------------------
%% Interface calls to sub-modules.
@@ -156,6 +169,31 @@ format(Format, Args) ->
Other
end.
+-spec scan_format(Format, Data) -> FormatList when
+ Format :: io:format(),
+ Data :: [term()],
+ FormatList :: [char() | format_spec()].
+
+scan_format(Format, Args) ->
+ try io_lib_format:scan(Format, Args)
+ catch
+ _:_ -> erlang:error(badarg, [Format, Args])
+ end.
+
+-spec unscan_format(FormatList) -> {Format, Data} when
+ FormatList :: [char() | format_spec()],
+ Format :: io:format(),
+ Data :: [term()].
+
+unscan_format(FormatList) ->
+ io_lib_format:unscan(FormatList).
+
+-spec build_text(FormatList) -> chars() when
+ FormatList :: [char() | format_spec()].
+
+build_text(FormatList) ->
+ io_lib_format:build(FormatList).
+
-spec print(Term) -> chars() when
Term :: term().
diff --git a/lib/stdlib/src/io_lib_format.erl b/lib/stdlib/src/io_lib_format.erl
index 89ae6fb187..015afb317a 100644
--- a/lib/stdlib/src/io_lib_format.erl
+++ b/lib/stdlib/src/io_lib_format.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2013. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2014. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -20,10 +20,9 @@
%% Formatting functions of io library.
--export([fwrite/2,fwrite_g/1,indentation/2]).
+-export([fwrite/2,fwrite_g/1,indentation/2,scan/2,unscan/1,build/1]).
-%% fwrite(Format, ArgList) -> string().
-%% Format the arguments in ArgList after string Format. Just generate
+%% Format the arguments in Args after string Format. Just generate
%% an error if there is an error in the arguments.
%%
%% To do the printing command correctly we need to calculate the
@@ -37,15 +36,84 @@
%% and it also splits the handling of the control characters into two
%% parts.
-fwrite(Format, Args) when is_atom(Format) ->
- fwrite(atom_to_list(Format), Args);
-fwrite(Format, Args) when is_binary(Format) ->
- fwrite(binary_to_list(Format), Args);
+-spec fwrite(Format, Data) -> FormatList when
+ Format :: io:format(),
+ Data :: [term()],
+ FormatList :: [char() | io_lib:format_spec()].
+
fwrite(Format, Args) ->
- Cs = collect(Format, Args),
+ build(scan(Format, Args)).
+
+%% Build the output text for a pre-parsed format list.
+
+-spec build(FormatList) -> io_lib:chars() when
+ FormatList :: [char() | io_lib:format_spec()].
+
+build(Cs) ->
Pc = pcount(Cs),
build(Cs, Pc, 0).
+%% Parse all control sequences in the format string.
+
+-spec scan(Format, Data) -> FormatList when
+ Format :: io:format(),
+ Data :: [term()],
+ FormatList :: [char() | io_lib:format_spec()].
+
+scan(Format, Args) when is_atom(Format) ->
+ scan(atom_to_list(Format), Args);
+scan(Format, Args) when is_binary(Format) ->
+ scan(binary_to_list(Format), Args);
+scan(Format, Args) ->
+ collect(Format, Args).
+
+%% Revert a pre-parsed format list to a plain character list and a
+%% list of arguments.
+
+-spec unscan(FormatList) -> {Format, Data} when
+ FormatList :: [char() | io_lib:format_spec()],
+ Format :: io:format(),
+ Data :: [term()].
+
+unscan(Cs) ->
+ {print(Cs), args(Cs)}.
+
+args([#{args := As} | Cs]) ->
+ As ++ args(Cs);
+args([_C | Cs]) ->
+ args(Cs);
+args([]) ->
+ [].
+
+print([#{control_char := C, width := F, adjust := Ad, precision := P,
+ pad_char := Pad, encoding := Encoding, strings := Strings} | Cs]) ->
+ print(C, F, Ad, P, Pad, Encoding, Strings) ++ print(Cs);
+print([C | Cs]) ->
+ [C | print(Cs)];
+print([]) ->
+ [].
+
+print(C, F, Ad, P, Pad, Encoding, Strings) ->
+ [$~] ++ print_field_width(F, Ad) ++ print_precision(P) ++
+ print_pad_char(Pad) ++ print_encoding(Encoding) ++
+ print_strings(Strings) ++ [C].
+
+print_field_width(none, _Ad) -> "";
+print_field_width(F, left) -> integer_to_list(-F);
+print_field_width(F, right) -> integer_to_list(F).
+
+print_precision(none) -> "";
+print_precision(P) -> [$. | integer_to_list(P)].
+
+print_pad_char($\s) -> ""; % default, no need to make explicit
+print_pad_char(Pad) -> [$., Pad].
+
+print_encoding(unicode) -> "t";
+print_encoding(latin1) -> "".
+
+print_strings(false) -> "l";
+print_strings(true) -> "".
+
collect([$~|Fmt0], Args0) ->
{C,Fmt1,Args1} = collect_cseq(Fmt0, Args0),
[C|collect(Fmt1, Args1)];
@@ -60,7 +128,10 @@ collect_cseq(Fmt0, Args0) ->
{Encoding,Fmt4,Args4} = encoding(Fmt3, Args3),
{Strings,Fmt5,Args5} = strings(Fmt4, Args4),
{C,As,Fmt6,Args6} = collect_cc(Fmt5, Args5),
- {{C,As,F,Ad,P,Pad,Encoding,Strings},Fmt6,Args6}.
+ FormatSpec = #{control_char => C, args => As, width => F, adjust => Ad,
+ precision => P, pad_char => Pad, encoding => Encoding,
+ strings => Strings},
+ {FormatSpec,Fmt6,Args6}.
encoding([$t|Fmt],Args) ->
true = hd(Fmt) =/= $l,
@@ -136,17 +207,19 @@ collect_cc([$i|Fmt], [A|Args]) -> {$i,[A],Fmt,Args}.
pcount(Cs) -> pcount(Cs, 0).
-pcount([{$p,_As,_F,_Ad,_P,_Pad,_Enc,_Str}|Cs], Acc) -> pcount(Cs, Acc+1);
-pcount([{$P,_As,_F,_Ad,_P,_Pad,_Enc,_Str}|Cs], Acc) -> pcount(Cs, Acc+1);
+pcount([#{control_char := $p}|Cs], Acc) -> pcount(Cs, Acc+1);
+pcount([#{control_char := $P}|Cs], Acc) -> pcount(Cs, Acc+1);
pcount([_|Cs], Acc) -> pcount(Cs, Acc);
pcount([], Acc) -> Acc.
-%% build([Control], Pc, Indentation) -> string().
+%% build([Control], Pc, Indentation) -> io_lib:chars().
%% Interpret the control structures. Count the number of print
%% remaining and only calculate indentation when necessary. Must also
%% be smart when calculating indentation for characters in format.
-build([{C,As,F,Ad,P,Pad,Enc,Str}|Cs], Pc0, I) ->
+build([#{control_char := C, args := As, width := F, adjust := Ad,
+ precision := P, pad_char := Pad, encoding := Enc,
+ strings := Str} | Cs], Pc0, I) ->
S = control(C, As, F, Ad, P, Pad, Enc, Str, I),
Pc1 = decr_pc(C, Pc0),
if
@@ -162,10 +235,14 @@ decr_pc($p, Pc) -> Pc - 1;
decr_pc($P, Pc) -> Pc - 1;
decr_pc(_, Pc) -> Pc.
-%% indentation(String, Indentation) -> Indentation.
+
%% Calculate the indentation of the end of a string given its start
%% indentation. We assume tabs at 8 cols.
+-spec indentation(String, StartIndent) -> integer() when
+ String :: io_lib:chars(),
+ StartIndent :: integer().
+
indentation([$\n|Cs], _I) -> indentation(Cs, 0);
indentation([$\t|Cs], I) -> indentation(Cs, ((I + 8) div 8) * 8);
indentation([C|Cs], I) when is_integer(C) ->
@@ -366,7 +443,6 @@ float_data([D|Cs], Ds) when D >= $0, D =< $9 ->
float_data([_|Cs], Ds) ->
float_data(Cs, Ds).
-%% fwrite_g(Float)
%% Writes the shortest, correctly rounded string that converts
%% to Float when read back with list_to_float/1.
%%
@@ -374,6 +450,8 @@ float_data([_|Cs], Ds) ->
%% in Proceedings of the SIGPLAN '96 Conference on Programming
%% Language Design and Implementation.
+-spec fwrite_g(float()) -> string().
+
fwrite_g(0.0) ->
"0.0";
fwrite_g(Float) when is_float(Float) ->
@@ -642,7 +720,7 @@ prefixed_integer(Int, F, Adj, Base, Pad, Prefix, Lowercase)
term([Prefix|S], F, Adj, none, Pad)
end.
-%% char(Char, Field, Adjust, Precision, PadChar) -> string().
+%% char(Char, Field, Adjust, Precision, PadChar) -> chars().
char(C, none, _Adj, none, _Pad) -> [C];
char(C, F, _Adj, none, _Pad) -> chars(C, F);
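A round-trip sketch through the public io_lib wrappers (not part of the patch); unscan_format/1 rebuilds an equivalent format string from the parsed form:

    {"~-10.4.xts", ["hello"]} =
        io_lib:unscan_format(io_lib:scan_format("~-10.4.xts", ["hello"])).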
diff --git a/lib/stdlib/src/math.erl b/lib/stdlib/src/math.erl
index 98a70b1644..43f736e54c 100644
--- a/lib/stdlib/src/math.erl
+++ b/lib/stdlib/src/math.erl
@@ -24,7 +24,7 @@
-export([sin/1, cos/1, tan/1, asin/1, acos/1, atan/1, atan2/2, sinh/1,
cosh/1, tanh/1, asinh/1, acosh/1, atanh/1, exp/1, log/1,
- log10/1, pow/2, sqrt/1, erf/1, erfc/1]).
+ log2/1, log10/1, pow/2, sqrt/1, erf/1, erfc/1]).
-spec acos(X) -> float() when
X :: number().
@@ -92,6 +92,11 @@ exp(_) ->
log(_) ->
erlang:nif_error(undef).
+-spec log2(X) -> float() when
+ X :: number().
+log2(_) ->
+ erlang:nif_error(undef).
+
-spec log10(X) -> float() when
X :: number().
log10(_) ->
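A quick sanity check of the new BIF wrapper (not part of the patch; exact results for powers of two are the usual libm behaviour rather than a guarantee):

    0.0  = math:log2(1),
    true = abs(math:log2(8) - 3.0) < 1.0e-12.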
diff --git a/lib/stdlib/src/otp_internal.erl b/lib/stdlib/src/otp_internal.erl
index 0ace87ef5c..4a338798d0 100644
--- a/lib/stdlib/src/otp_internal.erl
+++ b/lib/stdlib/src/otp_internal.erl
@@ -578,6 +578,19 @@ obsolete_1(asn1rt, utf8_binary_to_list, 1) ->
obsolete_1(asn1rt, utf8_list_to_binary, 1) ->
{deprecated,{unicode,characters_to_binary,1}};
+%% Added in OTP 18.
+obsolete_1(core_lib, get_anno, 1) ->
+ {deprecated,{cerl,get_ann,1}};
+obsolete_1(core_lib, set_anno, 2) ->
+ {deprecated,{cerl,set_ann,2}};
+obsolete_1(core_lib, is_literal, 1) ->
+ {deprecated,{cerl,is_literal,1}};
+obsolete_1(core_lib, is_literal_list, 1) ->
+ {deprecated,"deprecated; use lists:all(fun cerl:is_literal/1, L)"
+ " instead"};
+obsolete_1(core_lib, literal_value, 1) ->
+    {deprecated,{cerl,concrete,1}};
+
obsolete_1(_, _, _) ->
no.
diff --git a/lib/stdlib/src/string.erl b/lib/stdlib/src/string.erl
index f9b083a56d..f6903d1c3d 100644
--- a/lib/stdlib/src/string.erl
+++ b/lib/stdlib/src/string.erl
@@ -221,23 +221,47 @@ substr2([_|String], S) -> substr2(String, S-1).
Tokens :: [Token :: nonempty_string()].
tokens(S, Seps) ->
- tokens1(S, Seps, []).
+ case Seps of
+ [] ->
+ case S of
+ [] -> [];
+ [_|_] -> [S]
+ end;
+ [C] ->
+ tokens_single_1(reverse(S), C, []);
+ [_|_] ->
+ tokens_multiple_1(reverse(S), Seps, [])
+ end.
-tokens1([C|S], Seps, Toks) ->
+tokens_single_1([Sep|S], Sep, Toks) ->
+ tokens_single_1(S, Sep, Toks);
+tokens_single_1([C|S], Sep, Toks) ->
+ tokens_single_2(S, Sep, Toks, [C]);
+tokens_single_1([], _, Toks) ->
+ Toks.
+
+tokens_single_2([Sep|S], Sep, Toks, Tok) ->
+ tokens_single_1(S, Sep, [Tok|Toks]);
+tokens_single_2([C|S], Sep, Toks, Tok) ->
+ tokens_single_2(S, Sep, Toks, [C|Tok]);
+tokens_single_2([], _Sep, Toks, Tok) ->
+ [Tok|Toks].
+
+tokens_multiple_1([C|S], Seps, Toks) ->
case member(C, Seps) of
- true -> tokens1(S, Seps, Toks);
- false -> tokens2(S, Seps, Toks, [C])
+ true -> tokens_multiple_1(S, Seps, Toks);
+ false -> tokens_multiple_2(S, Seps, Toks, [C])
end;
-tokens1([], _Seps, Toks) ->
- reverse(Toks).
+tokens_multiple_1([], _Seps, Toks) ->
+ Toks.
-tokens2([C|S], Seps, Toks, Cs) ->
+tokens_multiple_2([C|S], Seps, Toks, Tok) ->
case member(C, Seps) of
- true -> tokens1(S, Seps, [reverse(Cs)|Toks]);
- false -> tokens2(S, Seps, Toks, [C|Cs])
+ true -> tokens_multiple_1(S, Seps, [Tok|Toks]);
+ false -> tokens_multiple_2(S, Seps, Toks, [C|Tok])
end;
-tokens2([], _Seps, Toks, Cs) ->
- reverse([reverse(Cs)|Toks]).
+tokens_multiple_2([], _Seps, Toks, Tok) ->
+ [Tok|Toks].
-spec chars(Character, Number) -> String when
Character :: char(),
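The observable behaviour of string:tokens/2 is preserved; only the traversal strategy changed. A quick sketch (the first expectation is taken from the updated test suite further down):

    ["1","2 34","45","5","6","7"] = string:tokens("1,2 34,45;5,;6;,7", ";,"),
    ["abc"] = string:tokens("abc", ""),
    []      = string:tokens("", ";").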
diff --git a/lib/stdlib/test/erl_lint_SUITE.erl b/lib/stdlib/test/erl_lint_SUITE.erl
index f8a99f653a..a7c3fd3c2e 100644
--- a/lib/stdlib/test/erl_lint_SUITE.erl
+++ b/lib/stdlib/test/erl_lint_SUITE.erl
@@ -3708,7 +3708,13 @@ maps(Config) ->
">>,
[],
{errors,[{4,erl_lint,illegal_map_construction},
- {6,erl_lint,illegal_map_key}],[]}}],
+ {6,erl_lint,illegal_map_key}],[]}},
+ {unused_vars_with_empty_maps,
+ <<"t(Foo, Bar, Baz) -> {#{},#{}}.">>,
+ [warn_unused_variables],
+ {warnings,[{1,erl_lint,{unused_var,'Bar'}},
+ {1,erl_lint,{unused_var,'Baz'}},
+ {1,erl_lint,{unused_var,'Foo'}}]}}],
[] = run(Config, Ts),
ok.
diff --git a/lib/stdlib/test/io_SUITE.erl b/lib/stdlib/test/io_SUITE.erl
index 2203dd8f51..8d53949c40 100644
--- a/lib/stdlib/test/io_SUITE.erl
+++ b/lib/stdlib/test/io_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1999-2013. All Rights Reserved.
+%% Copyright Ericsson AB 1999-2014. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -31,7 +31,7 @@
printable_range/1,
io_lib_print_binary_depth_one/1, otp_10302/1, otp_10755/1,
otp_10836/1, io_lib_width_too_small/1,
- io_with_huge_message_queue/1]).
+ io_with_huge_message_queue/1, format_string/1]).
-export([pretty/2]).
@@ -71,7 +71,8 @@ all() ->
io_fread_newlines, otp_8989, io_lib_fread_literal,
printable_range,
io_lib_print_binary_depth_one, otp_10302, otp_10755, otp_10836,
- io_lib_width_too_small, io_with_huge_message_queue].
+ io_lib_width_too_small, io_with_huge_message_queue,
+ format_string].
groups() ->
[].
@@ -1035,7 +1036,14 @@ rp(Term, Col, Ll, D, M, RF) ->
lists:flatten(io_lib:format("~s", [R])).
fmt(Fmt, Args) ->
- lists:flatten(io_lib:format(Fmt, Args)).
+ FormatList = io_lib:scan_format(Fmt, Args),
+ {Fmt2, Args2} = io_lib:unscan_format(FormatList),
+ Chars1 = lists:flatten(io_lib:build_text(FormatList)),
+ Chars2 = lists:flatten(io_lib:format(Fmt2, Args2)),
+ Chars3 = lists:flatten(io_lib:format(Fmt, Args)),
+ Chars1 = Chars2,
+ Chars2 = Chars3,
+ Chars3.
rfd(a, 0) ->
[];
@@ -2261,3 +2269,9 @@ writes(0, _) -> ok;
writes(N, F1) ->
file:write(F1, "hello\n"),
writes(N - 1, F1).
+
+format_string(Config) ->
+ %% All but padding is tested by fmt/2.
+ "xxxxxxsssx" = fmt("~10.4.xs", ["sss"]),
+ "xxxxxxsssx" = fmt("~10.4.*s", [$x, "sss"]),
+ ok.
diff --git a/lib/stdlib/test/stdlib_SUITE.erl b/lib/stdlib/test/stdlib_SUITE.erl
index 6669a21b9c..206eb4fd74 100644
--- a/lib/stdlib/test/stdlib_SUITE.erl
+++ b/lib/stdlib/test/stdlib_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -172,9 +172,16 @@ major_upgrade(Config) ->
%% Version numbers are checked by ct_release_test, so there is nothing
%% more to check here...
-upgrade_init(State) ->
+upgrade_init(CtData,State) ->
+ {ok,{FromVsn,ToVsn}} = ct_release_test:get_app_vsns(CtData,stdlib),
+ case ct_release_test:get_appup(CtData,stdlib) of
+ {ok,{FromVsn,ToVsn,[restart_new_emulator],[restart_new_emulator]}} ->
+ io:format("Upgrade/downgrade ~p <--> ~p",[FromVsn,ToVsn]);
+ {error,{vsn_not_found,_}} when FromVsn==ToVsn ->
+ io:format("No upgrade test for stdlib, same version")
+ end,
State.
-upgrade_upgraded(State) ->
+upgrade_upgraded(_CtData,State) ->
State.
-upgrade_downgraded(State) ->
+upgrade_downgraded(_CtData,State) ->
State.
diff --git a/lib/stdlib/test/string_SUITE.erl b/lib/stdlib/test/string_SUITE.erl
index fccd1bef95..a55c710d50 100644
--- a/lib/stdlib/test/string_SUITE.erl
+++ b/lib/stdlib/test/string_SUITE.erl
@@ -217,21 +217,39 @@ substr(Config) when is_list(Config) ->
?line {'EXIT',_} = (catch string:substr("1234", "1")),
ok.
-tokens(suite) ->
- [];
-tokens(doc) ->
- [];
tokens(Config) when is_list(Config) ->
- ?line [] = string:tokens("",""),
- ?line [] = string:tokens("abc","abc"),
- ?line ["abc"] = string:tokens("abc", ""),
- ?line ["1","2 34","4","5"] = string:tokens("1,2 34,4;5", ";,"),
- %% invalid arg type
- ?line {'EXIT',_} = (catch string:tokens('x,y', ",")),
+ [] = string:tokens("",""),
+ [] = string:tokens("abc","abc"),
+ ["abc"] = string:tokens("abc", ""),
+ ["1","2 34","45","5","6","7"] = do_tokens("1,2 34,45;5,;6;,7", ";,"),
+
%% invalid arg type
- ?line {'EXIT',_} = (catch string:tokens("x,y", ',')),
+ {'EXIT',_} = (catch string:tokens('x,y', ",")),
+ {'EXIT',_} = (catch string:tokens("x,y", ',')),
ok.
+do_tokens(S0, Sep0) ->
+ [H|T] = Sep0,
+ S = [replace_sep(C, T, H) || C <- S0],
+ Sep = [H],
+ io:format("~p ~p\n", [S0,Sep0]),
+ io:format("~p ~p\n", [S,Sep]),
+
+ Res = string:tokens(S0, Sep0),
+ Res = string:tokens(Sep0++S0, Sep0),
+ Res = string:tokens(S0++Sep0, Sep0),
+
+ Res = string:tokens(S, Sep),
+ Res = string:tokens(Sep++S, Sep),
+ Res = string:tokens(S++Sep, Sep),
+
+ Res.
+
+replace_sep(C, Seps, New) ->
+ case lists:member(C, Seps) of
+ true -> New;
+ false -> C
+ end.
chars(suite) ->
[];
diff --git a/lib/syntax_tools/src/epp_dodger.erl b/lib/syntax_tools/src/epp_dodger.erl
index 7e12eab1b5..3ca7a8197e 100644
--- a/lib/syntax_tools/src/epp_dodger.erl
+++ b/lib/syntax_tools/src/epp_dodger.erl
@@ -88,7 +88,7 @@
%% This is a so-called Erlang I/O ErrorInfo structure; see the {@link
%% //stdlib/io} module for details.
--type errorinfo() :: term(). % {integer(), atom(), term()}.
+-type errorinfo() :: {integer(), atom(), term()}.
-type option() :: atom() | {atom(), term()}.
@@ -208,8 +208,8 @@ do_parse_file(DefEncoding, File, Parser, Options) ->
try Parser(Dev, 1, Options)
after ok = file:close(Dev)
end;
- {error, _} = Error ->
- Error
+ {error, Error} ->
+ {error, {0, file, Error}} % defer to file:format_error/1
end.
find_invalid_unicode([H|T]) ->
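A sketch of the effect (the file name is illustrative, not part of the patch): an open failure now yields an errorinfo() tuple whose reason can be rendered with the module named in it:

    case epp_dodger:parse_file("no_such_file.erl") of
        {ok, Forms} ->
            Forms;
        {error, {_Line, Mod, Reason}} ->
            io:format("~s~n", [Mod:format_error(Reason)])
    end.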
diff --git a/lib/syntax_tools/src/erl_prettypr.erl b/lib/syntax_tools/src/erl_prettypr.erl
index 877675772f..81272e62de 100644
--- a/lib/syntax_tools/src/erl_prettypr.erl
+++ b/lib/syntax_tools/src/erl_prettypr.erl
@@ -50,8 +50,7 @@
| fun((erl_syntax:syntaxTree(), _, _) -> prettypr:document()).
-type clause_t() :: 'case_expr' | 'cond_expr' | 'fun_expr'
| 'if_expr' | 'receive_expr' | 'try_expr'
- | {'function', prettypr:document()}
- | {'rule', prettypr:document()}.
+ | {'function', prettypr:document()}.
-record(ctxt, {prec = 0 :: integer(),
sub_indent = 2 :: non_neg_integer(),
@@ -587,8 +586,6 @@ lay_2(Node, Ctxt) ->
make_case_clause(D1, D2, D3, Ctxt);
try_expr ->
make_case_clause(D1, D2, D3, Ctxt);
- {rule, N} ->
- make_rule_clause(N, D1, D2, D3, Ctxt);
undefined ->
%% If a clause is formatted out of context, we
%% use a "fun-expression" clause style.
@@ -851,14 +848,10 @@ lay_2(Node, Ctxt) ->
floating(text(".")),
lay(erl_syntax:record_access_field(Node),
set_prec(Ctxt, PrecR))),
- D3 = case erl_syntax:record_access_type(Node) of
- none ->
- D2;
- T ->
- beside(beside(floating(text("#")),
- lay(T, reset_prec(Ctxt))),
- D2)
- end,
+ T = erl_syntax:record_access_type(Node),
+ D3 = beside(beside(floating(text("#")),
+ lay(T, reset_prec(Ctxt))),
+ D2),
maybe_parentheses(beside(D1, D3), Prec, Ctxt);
record_expr ->
@@ -926,15 +919,6 @@ lay_2(Node, Ctxt) ->
D2 = lay(erl_syntax:map_field_exact_value(Node), Ctxt1),
par([D1, floating(text(":=")), D2], Ctxt1#ctxt.break_indent);
- rule ->
- %% Comments on the name will be repeated; cf.
- %% `function'.
- Ctxt1 = reset_prec(Ctxt),
- D1 = lay(erl_syntax:rule_name(Node), Ctxt1),
- D2 = lay_clauses(erl_syntax:rule_clauses(Node),
- {rule, D1}, Ctxt1),
- beside(D2, floating(text(".")));
-
size_qualifier ->
Ctxt1 = set_prec(Ctxt, max_prec()),
D1 = lay(erl_syntax:size_qualifier_body(Node), Ctxt1),
@@ -1073,10 +1057,6 @@ make_fun_clause_head(N, P, Ctxt) ->
beside(N, D)
end.
-make_rule_clause(N, P, G, B, Ctxt) ->
- D = make_fun_clause_head(N, P, Ctxt),
- append_rule_body(B, append_guard(G, D, Ctxt), Ctxt).
-
make_case_clause(P, G, B, Ctxt) ->
append_clause_body(B, append_guard(G, P, Ctxt), Ctxt).
@@ -1092,9 +1072,6 @@ make_if_clause(_P, G, B, Ctxt) ->
append_clause_body(B, D, Ctxt) ->
append_clause_body(B, D, floating(text(" ->")), Ctxt).
-append_rule_body(B, D, Ctxt) ->
- append_clause_body(B, D, floating(text(" :-")), Ctxt).
-
append_clause_body(B, D, S, Ctxt) ->
sep([beside(D, S), nest(Ctxt#ctxt.break_indent, B)]).
diff --git a/lib/syntax_tools/src/erl_syntax.erl b/lib/syntax_tools/src/erl_syntax.erl
index de271d7f2f..3f2a3e05dd 100644
--- a/lib/syntax_tools/src/erl_syntax.erl
+++ b/lib/syntax_tools/src/erl_syntax.erl
@@ -254,7 +254,6 @@
receive_expr_action/1,
receive_expr_clauses/1,
receive_expr_timeout/1,
- record_access/2,
record_access/3,
record_access_argument/1,
record_access_field/1,
@@ -271,10 +270,6 @@
record_index_expr/2,
record_index_expr_field/1,
record_index_expr_type/1,
- rule/2,
- rule_arity/1,
- rule_clauses/1,
- rule_name/1,
size_qualifier/2,
size_qualifier_argument/1,
size_qualifier_body/1,
@@ -472,19 +467,16 @@
%% <td>record_field</td>
%% </tr><tr>
%% <td>record_index_expr</td>
-%% <td>rule</td>
%% <td>size_qualifier</td>
%% <td>string</td>
-%% </tr><tr>
%% <td>text</td>
+%% </tr><tr>
%% <td>try_expr</td>
%% <td>tuple</td>
%% <td>underscore</td>
-%% </tr><tr>
%% <td>variable</td>
+%% </tr><tr>
%% <td>warning_marker</td>
-%% <td></td>
-%% <td></td>
%% </tr>
%% </table></center>
%%
@@ -540,7 +532,6 @@
%% @see record_expr/2
%% @see record_field/2
%% @see record_index_expr/2
-%% @see rule/2
%% @see size_qualifier/2
%% @see string/1
%% @see text/1
@@ -607,10 +598,8 @@ type(Node) ->
{record, _, _, _, _} -> record_expr;
{record, _, _, _} -> record_expr;
{record_field, _, _, _, _} -> record_access;
- {record_field, _, _, _} -> record_access;
{record_index, _, _, _} -> record_index_expr;
{remote, _, _, _} -> module_qualifier;
- {rule, _, _, _, _} -> rule;
{'try', _, _, _, _, _} -> try_expr;
{tuple, _, _} -> tuple;
_ ->
@@ -693,10 +682,9 @@ is_leaf(Node) ->
%% <td>`comment'</td>
%% <td>`error_marker'</td>
%% <td>`eof_marker'</td>
-%% <td>`form_list'</td>
%% </tr><tr>
+%% <td>`form_list'</td>
%% <td>`function'</td>
-%% <td>`rule'</td>
%% <td>`warning_marker'</td>
%% <td>`text'</td>
%% </tr>
@@ -709,7 +697,6 @@ is_leaf(Node) ->
%% @see error_marker/1
%% @see form_list/1
%% @see function/2
-%% @see rule/2
%% @see warning_marker/1
-spec is_form(syntaxTree()) -> boolean().
@@ -722,7 +709,6 @@ is_form(Node) ->
eof_marker -> true;
error_marker -> true;
form_list -> true;
- rule -> true;
warning_marker -> true;
text -> true;
_ -> false
@@ -3480,7 +3466,6 @@ module_qualifier_body(Node) ->
%% @see function_clauses/1
%% @see function_arity/1
%% @see is_form/1
-%% @see rule/2
%% Don't use the name 'function' for this record, to avoid confusion with
%% the tuples on the form {function,Name,Arity} used by erl_parse.
@@ -4310,49 +4295,32 @@ record_index_expr_field(Node) ->
%% =====================================================================
-%% @equiv record_access(Argument, none, Field)
-
--spec record_access(syntaxTree(), syntaxTree()) -> syntaxTree().
-
-record_access(Argument, Field) ->
- record_access(Argument, none, Field).
-
-
-%% =====================================================================
-%% @doc Creates an abstract record field access expression. If
-%% `Type' is not `none', the result represents
-%% "<code><em>Argument</em>#<em>Type</em>.<em>Field</em></code>".
-%%
-%% If `Type' is `none', the result represents
-%% "<code><em>Argument</em>.<em>Field</em></code>". This is a special
-%% form only allowed within Mnemosyne queries.
+%% @doc Creates an abstract record field access expression. The result
+%% represents "<code><em>Argument</em>#<em>Type</em>.<em>Field</em></code>".
%%
-%% @see record_access/2
%% @see record_access_argument/1
%% @see record_access_type/1
%% @see record_access_field/1
%% @see record_expr/3
-record(record_access, {argument :: syntaxTree(),
- type :: 'none' | syntaxTree(),
+ type :: syntaxTree(),
field :: syntaxTree()}).
%% type(Node) = record_access
%% data(Node) = #record_access{argument :: Argument, type :: Type,
%% field :: Field}
%%
-%% Argument = Field = syntaxTree()
-%% Type = none | syntaxTree()
+%% Argument = Type = Field = syntaxTree()
%%
%% `erl_parse' representation:
%%
%% {record_field, Pos, Argument, Type, Field}
-%% {record_field, Pos, Argument, Field}
%%
%% Argument = Field = erl_parse()
%% Type = atom()
--spec record_access(syntaxTree(), 'none' | syntaxTree(), syntaxTree()) ->
+-spec record_access(syntaxTree(), syntaxTree(), syntaxTree()) ->
syntaxTree().
record_access(Argument, Type, Field) ->
@@ -4365,16 +4333,11 @@ revert_record_access(Node) ->
Argument = record_access_argument(Node),
Type = record_access_type(Node),
Field = record_access_field(Node),
- if Type =:= none ->
- {record_field, Pos, Argument, Field};
- true ->
- case type(Type) of
- atom ->
- {record_field, Pos,
- Argument, concrete(Type), Field};
- _ ->
- Node
- end
+ case type(Type) of
+ atom ->
+ {record_field, Pos, Argument, concrete(Type), Field};
+ _ ->
+ Node
end.
@@ -4387,8 +4350,6 @@ revert_record_access(Node) ->
record_access_argument(Node) ->
case unwrap(Node) of
- {record_field, _, Argument, _} ->
- Argument;
{record_field, _, Argument, _, _} ->
Argument;
Node1 ->
@@ -4397,21 +4358,14 @@ record_access_argument(Node) ->
%% =====================================================================
-%% @doc Returns the type subtree of a `record_access' node,
-%% if any. If `Node' represents
-%% "<code><em>Argument</em>.<em>Field</em></code>", `none'
-%% is returned, otherwise if `Node' represents
-%% "<code><em>Argument</em>#<em>Type</em>.<em>Field</em></code>",
-%% `Type' is returned.
+%% @doc Returns the type subtree of a `record_access' node.
%%
%% @see record_access/3
--spec record_access_type(syntaxTree()) -> 'none' | syntaxTree().
+-spec record_access_type(syntaxTree()) -> syntaxTree().
record_access_type(Node) ->
case unwrap(Node) of
- {record_field, _, _, _} ->
- none;
{record_field, Pos, _, Type, _} ->
set_pos(atom(Type), Pos);
Node1 ->
@@ -4428,8 +4382,6 @@ record_access_type(Node) ->
record_access_field(Node) ->
case unwrap(Node) of
- {record_field, _, _, Field} ->
- Field;
{record_field, _, _, _, Field} ->
Field;
Node1 ->
@@ -4808,117 +4760,6 @@ binary_comp_body(Node) ->
%% =====================================================================
-%% @doc Creates an abstract Mnemosyne rule. If `Clauses' is
-%% `[C1, ..., Cn]', the results represents
-%% "<code><em>Name</em> <em>C1</em>; ...; <em>Name</em>
-%% <em>Cn</em>.</code>". More exactly, if each `Ci'
-%% represents "<code>(<em>Pi1</em>, ..., <em>Pim</em>) <em>Gi</em> ->
-%% <em>Bi</em></code>", then the result represents
-%% "<code><em>Name</em>(<em>P11</em>, ..., <em>P1m</em>) <em>G1</em> :-
-%% <em>B1</em>; ...; <em>Name</em>(<em>Pn1</em>, ..., <em>Pnm</em>)
-%% <em>Gn</em> :- <em>Bn</em>.</code>". Rules are source code forms.
-%%
-%% @see rule_name/1
-%% @see rule_clauses/1
-%% @see rule_arity/1
-%% @see is_form/1
-%% @see function/2
-
--record(rule, {name :: syntaxTree(), clauses :: [syntaxTree()]}).
-
-%% type(Node) = rule
-%% data(Node) = #rule{name :: Name, clauses :: Clauses}
-%%
-%% Name = syntaxTree()
-%% Clauses = [syntaxTree()]
-%%
-%% (See `function' for notes on why the arity is not stored.)
-%%
-%% `erl_parse' representation:
-%%
-%% {rule, Pos, Name, Arity, Clauses}
-%%
-%% Name = atom()
-%% Arity = integer()
-%% Clauses = [Clause] \ []
-%% Clause = {clause, ...}
-%%
-%% where the number of patterns in each clause should be equal to
-%% the integer `Arity'; see `clause' for documentation on
-%% `erl_parse' clauses.
-
--spec rule(syntaxTree(), [syntaxTree()]) -> syntaxTree().
-
-rule(Name, Clauses) ->
- tree(rule, #rule{name = Name, clauses = Clauses}).
-
-revert_rule(Node) ->
- Name = rule_name(Node),
- Clauses = [revert_clause(C) || C <- rule_clauses(Node)],
- Pos = get_pos(Node),
- case type(Name) of
- atom ->
- A = rule_arity(Node),
- {rule, Pos, concrete(Name), A, Clauses};
- _ ->
- Node
- end.
-
-
-%% =====================================================================
-%% @doc Returns the name subtree of a `rule' node.
-%%
-%% @see rule/2
-
--spec rule_name(syntaxTree()) -> syntaxTree().
-
-rule_name(Node) ->
- case unwrap(Node) of
- {rule, Pos, Name, _, _} ->
- set_pos(atom(Name), Pos);
- Node1 ->
- (data(Node1))#rule.name
- end.
-
-%% =====================================================================
-%% @doc Returns the list of clause subtrees of a `rule' node.
-%%
-%% @see rule/2
-
--spec rule_clauses(syntaxTree()) -> [syntaxTree()].
-
-rule_clauses(Node) ->
- case unwrap(Node) of
- {rule, _, _, _, Clauses} ->
- Clauses;
- Node1 ->
- (data(Node1))#rule.clauses
- end.
-
-%% =====================================================================
-%% @doc Returns the arity of a `rule' node. The result is the
-%% number of parameter patterns in the first clause of the rule;
-%% subsequent clauses are ignored.
-%%
-%% An exception is thrown if `rule_clauses(Node)' returns
-%% an empty list, or if the first element of that list is not a syntax
-%% tree `C' of type `clause' such that
-%% `clause_patterns(C)' is a nonempty list.
-%%
-%% @see rule/2
-%% @see rule_clauses/1
-%% @see clause/3
-%% @see clause_patterns/1
-
--spec rule_arity(syntaxTree()) -> arity().
-
-rule_arity(Node) ->
- %% Note that this never accesses the arity field of
- %% `erl_parse' rule nodes.
- length(clause_patterns(hd(rule_clauses(Node)))).
-
-
-%% =====================================================================
%% @doc Creates an abstract generator. The result represents
%% "<code><em>Pattern</em> &lt;- <em>Body</em></code>".
%%
@@ -6403,8 +6244,6 @@ revert_root(Node) ->
revert_record_expr(Node);
record_index_expr ->
revert_record_index_expr(Node);
- rule ->
- revert_rule(Node);
string ->
revert_string(Node);
try_expr ->
@@ -6661,15 +6500,9 @@ subtrees(T) ->
receive_expr_action(T)]
end;
record_access ->
- case record_access_type(T) of
- none ->
- [[record_access_argument(T)],
- [record_access_field(T)]];
- R ->
- [[record_access_argument(T)],
- [R],
- [record_access_field(T)]]
- end;
+ [[record_access_argument(T)],
+ [record_access_type(T)],
+ [record_access_field(T)]];
record_expr ->
case record_expr_argument(T) of
none ->
@@ -6690,8 +6523,6 @@ subtrees(T) ->
record_index_expr ->
[[record_index_expr_type(T)],
[record_index_expr_field(T)]];
- rule ->
- [[rule_name(T)], rule_clauses(T)];
size_qualifier ->
[[size_qualifier_body(T)],
[size_qualifier_argument(T)]];
@@ -6786,8 +6617,6 @@ make_tree(parentheses, [[E]]) -> parentheses(E);
make_tree(prefix_expr, [[F], [A]]) -> prefix_expr(F, A);
make_tree(receive_expr, [C]) -> receive_expr(C);
make_tree(receive_expr, [C, [E], A]) -> receive_expr(C, E, A);
-make_tree(record_access, [[E], [F]]) ->
- record_access(E, F);
make_tree(record_access, [[E], [T], [F]]) ->
record_access(E, T, F);
make_tree(record_expr, [[T], F]) -> record_expr(T, F);
@@ -6796,7 +6625,6 @@ make_tree(record_field, [[N]]) -> record_field(N);
make_tree(record_field, [[N], [E]]) -> record_field(N, E);
make_tree(record_index_expr, [[T], [F]]) ->
record_index_expr(T, F);
-make_tree(rule, [[N], C]) -> rule(N, C);
make_tree(size_qualifier, [[N], [A]]) -> size_qualifier(N, A);
make_tree(try_expr, [B, C, H, A]) -> try_expr(B, C, H, A);
make_tree(tuple, [E]) -> tuple(E).
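With the Mnemosyne-era two-argument form gone, record_access/3 always carries a record type. A small sketch (not part of the patch):

    Expr = erl_syntax:record_access(erl_syntax:variable('R'),
                                    erl_syntax:atom(person),
                                    erl_syntax:atom(name)),
    "R#person.name" = lists:flatten(erl_prettypr:format(Expr)).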
diff --git a/lib/syntax_tools/src/erl_syntax_lib.erl b/lib/syntax_tools/src/erl_syntax_lib.erl
index 2f0488abec..5b5b18d15b 100644
--- a/lib/syntax_tools/src/erl_syntax_lib.erl
+++ b/lib/syntax_tools/src/erl_syntax_lib.erl
@@ -35,8 +35,7 @@
analyze_function_name/1, analyze_implicit_fun/1,
analyze_import_attribute/1, analyze_module_attribute/1,
analyze_record_attribute/1, analyze_record_expr/1,
- analyze_record_field/1, analyze_rule/1,
- analyze_wild_attribute/1, annotate_bindings/1,
+ analyze_record_field/1, analyze_wild_attribute/1, annotate_bindings/1,
annotate_bindings/2, fold/3, fold_subtrees/3, foldl_listlist/3,
function_name_expansions/1, is_fail_expr/1, limit/2, limit/3,
map/2, map_subtrees/2, mapfold/3, mapfold_subtrees/3,
@@ -527,8 +526,6 @@ vann(Tree, Env) ->
vann_try_expr(Tree, Env);
function ->
vann_function(Tree, Env);
- rule ->
- vann_rule(Tree, Env);
fun_expr ->
vann_fun_expr(Tree, Env);
list_comp ->
@@ -569,15 +566,6 @@ vann_function(Tree, Env) ->
Bound = [],
{ann_bindings(Tree1, Env, Bound, Free), Bound, Free}.
-vann_rule(Tree, Env) ->
- Cs = erl_syntax:rule_clauses(Tree),
- {Cs1, {_, Free}} = vann_clauses(Cs, Env),
- N = erl_syntax:rule_name(Tree),
- {N1, _, _} = vann(N, Env),
- Tree1 = rewrite(Tree, erl_syntax:rule(N1, Cs1)),
- Bound = [],
- {ann_bindings(Tree1, Env, Bound, Free), Bound, Free}.
-
vann_fun_expr(Tree, Env) ->
Cs = erl_syntax:fun_expr_clauses(Tree),
{Cs1, {_, Free}} = vann_clauses(Cs, Env),
@@ -946,7 +934,7 @@ is_fail_expr(E) ->
%%
%% Forms = syntaxTree() | [syntaxTree()]
%% Key = attributes | errors | exports | functions | imports
-%% | module | records | rules | warnings
+%% | module | records | warnings
%%
%% @doc Analyzes a sequence of "program forms". The given
%% `Forms' may be a single syntax tree of type
@@ -1047,16 +1035,6 @@ is_fail_expr(E) ->
%% that each record name occurs at most once in the list. The
%% order of listing is not defined.</dd>
%%
-%% <dt>`{rules, Rules}'</dt>
-%% <dd><ul>
-%% <li>`Rules = [{atom(), integer()}]'</li>
-%% </ul>
-%% `Rules' is a list of the names of the rules that are
-%% defined in `Forms' (cf.
-%% `analyze_rule/1'). We do not guarantee that each
-%% name occurs at most once in the list. The order of listing is
-%% not defined.</dd>
-%%
%% <dt>`{warnings, Warnings}'</dt>
%% <dd><ul>
%% <li>`Warnings = [term()]'</li>
@@ -1074,12 +1052,11 @@ is_fail_expr(E) ->
%% @see analyze_import_attribute/1
%% @see analyze_record_attribute/1
%% @see analyze_function/1
-%% @see analyze_rule/1
%% @see erl_syntax:error_marker_info/1
%% @see erl_syntax:warning_marker_info/1
-type key() :: 'attributes' | 'errors' | 'exports' | 'functions' | 'imports'
- | 'module' | 'records' | 'rules' | 'warnings'.
+ | 'module' | 'records' | 'warnings'.
-type info_pair() :: {key(), term()}.
-spec analyze_forms(erl_syntax:forms()) -> [info_pair()].
@@ -1099,8 +1076,6 @@ collect_form(Node, Info) ->
Info;
{function, Name} ->
finfo_add_function(Name, Info);
- {rule, Name} ->
- finfo_add_rule(Name, Info);
{error_marker, Data} ->
finfo_add_error(Data, Info);
{warning_marker, Data} ->
@@ -1136,8 +1111,7 @@ collect_attribute(_, {N, V}, Info) ->
records = [] :: [{atom(), [{atom(), field_default()}]}],
errors = [] :: [term()],
warnings = [] :: [term()],
- functions = [] :: [{atom(), arity()}],
- rules = [] :: [{atom(), arity()}]}).
+ functions = [] :: [{atom(), arity()}]}).
-type field_default() :: 'none' | erl_syntax:syntaxTree().
@@ -1183,9 +1157,6 @@ finfo_add_warning(R, Info) ->
finfo_add_function(F, Info) ->
Info#forms{functions = [F | Info#forms.functions]}.
-finfo_add_rule(F, Info) ->
- Info#forms{rules = [F | Info#forms.rules]}.
-
finfo_to_list(Info) ->
[{Key, Value}
|| {Key, {value, Value}} <-
@@ -1197,8 +1168,7 @@ finfo_to_list(Info) ->
{records, list_value(Info#forms.records)},
{errors, list_value(Info#forms.errors)},
{warnings, list_value(Info#forms.warnings)},
- {functions, list_value(Info#forms.functions)},
- {rules, list_value(Info#forms.rules)}
+ {functions, list_value(Info#forms.functions)}
]].
list_value([]) ->
@@ -1229,10 +1199,6 @@ list_value(List) ->
%%
%% <dd>where `Info = analyze_function(Node)'.</dd>
%%
-%% <dt>`{rule, Info}'</dt>
-%%
-%% <dd>where `Info = analyze_rule(Node)'.</dd>
-%%
%% <dt>`{warning_marker, Info}'</dt>
%%
%% <dd>where `Info =
@@ -1245,7 +1211,6 @@ list_value(List) ->
%%
%% @see analyze_attribute/1
%% @see analyze_function/1
-%% @see analyze_rule/1
%% @see erl_syntax:is_form/1
%% @see erl_syntax:error_marker_info/1
%% @see erl_syntax:warning_marker_info/1
@@ -1258,8 +1223,6 @@ analyze_form(Node) ->
{attribute, analyze_attribute(Node)};
function ->
{function, analyze_function(Node)};
- rule ->
- {rule, analyze_rule(Node)};
error_marker ->
{error_marker, erl_syntax:error_marker_info(Node)};
warning_marker ->
@@ -1669,7 +1632,7 @@ analyze_record_attribute_tuple(Node) ->
%% <dt>`record_expr':</dt>
%% <dd>`{atom(), [{atom(), Value}]}'</dd>
%% <dt>`record_access':</dt>
-%% <dd>`{atom(), atom()} | atom()'</dd>
+%% <dd>`{atom(), atom()}'</dd>
%% <dt>`record_index_expr':</dt>
%% <dd>`{atom(), atom()}'</dd>
%% </dl>
@@ -1679,9 +1642,7 @@ analyze_record_attribute_tuple(Node) ->
%% listed in the order they appear. (See
%% `analyze_record_field/1' for details on the field
%% descriptors). For a `record_access' node,
-%% `Info' represents the record name and the field name (or
-%% if the record name is not included, only the field name; this is
-%% allowed only in Mnemosyne-query syntax). For a
+%% `Info' represents the record name and the field name. For a
%% `record_index_expr' node, `Info' represents the
%% record name and the name field name.
%%
@@ -1713,18 +1674,14 @@ analyze_record_expr(Node) ->
F = erl_syntax:record_access_field(Node),
case erl_syntax:type(F) of
atom ->
- case erl_syntax:record_access_type(Node) of
- none ->
- {record_access, erl_syntax:atom_value(F)};
- A ->
- case erl_syntax:type(A) of
- atom ->
- {record_access,
- {erl_syntax:atom_value(A),
- erl_syntax:atom_value(F)}};
- _ ->
- throw(syntax_error)
- end
+ A = erl_syntax:record_access_type(Node),
+ case erl_syntax:type(A) of
+ atom ->
+ {record_access,
+ {erl_syntax:atom_value(A),
+ erl_syntax:atom_value(F)}};
+ _ ->
+ throw(syntax_error)
end;
_ ->
throw(syntax_error)
@@ -1835,8 +1792,6 @@ analyze_file_attribute(Node) ->
%% The evaluation throws `syntax_error' if
%% `Node' does not represent a well-formed function
%% definition.
-%%
-%% @see analyze_rule/1
-spec analyze_function(erl_syntax:syntaxTree()) -> {atom(), arity()}.
@@ -1857,37 +1812,6 @@ analyze_function(Node) ->
%% =====================================================================
-%% @spec analyze_rule(Node::syntaxTree()) -> {atom(), integer()}
-%%
-%% @doc Returns the name and arity of a Mnemosyne rule. The result is a
-%% pair `{Name, A}' if `Node' represents a rule
-%% "`Name(<em>P_1</em>, ..., <em>P_A</em>) :- ...'".
-%%
-%% The evaluation throws `syntax_error' if
-%% `Node' does not represent a well-formed Mnemosyne
-%% rule.
-%%
-%% @see analyze_function/1
-
--spec analyze_rule(erl_syntax:syntaxTree()) -> {atom(), arity()}.
-
-analyze_rule(Node) ->
- case erl_syntax:type(Node) of
- rule ->
- N = erl_syntax:rule_name(Node),
- case erl_syntax:type(N) of
- atom ->
- {erl_syntax:atom_value(N),
- erl_syntax:rule_arity(Node)};
- _ ->
- throw(syntax_error)
- end;
- _ ->
- throw(syntax_error)
- end.
-
-
-%% =====================================================================
%% @spec analyze_implicit_fun(Node::syntaxTree()) -> FunctionName
%%
%% FunctionName = atom() | {atom(), integer()}
diff --git a/lib/syntax_tools/src/erl_tidy.erl b/lib/syntax_tools/src/erl_tidy.erl
index 38e0c2099b..db7f0939a3 100644
--- a/lib/syntax_tools/src/erl_tidy.erl
+++ b/lib/syntax_tools/src/erl_tidy.erl
@@ -792,16 +792,11 @@ keep_form(Form, Used, Opts) ->
N = erl_syntax_lib:analyze_function(Form),
case sets:is_element(N, Used) of
false ->
- report_removed_def("function", N, Form, Opts),
- false;
- true ->
- true
- end;
- rule ->
- N = erl_syntax_lib:analyze_rule(Form),
- case sets:is_element(N, Used) of
- false ->
- report_removed_def("rule", N, Form, Opts),
+ {F, A} = N,
+ File = proplists:get_value(file, Opts, ""),
+ report({File, erl_syntax:get_pos(Form),
+ "removing unused function `~w/~w'."},
+ [F, A], Opts),
false;
true ->
true
@@ -823,12 +818,6 @@ keep_form(Form, Used, Opts) ->
true
end.
-report_removed_def(Type, {N, A}, Form, Opts) ->
- File = proplists:get_value(file, Opts, ""),
- report({File, erl_syntax:get_pos(Form),
- "removing unused ~s `~w/~w'."},
- [Type, N, A], Opts).
-
collect_functions(Forms) ->
lists:foldl(
fun (F, {Names, Defs}) ->
@@ -837,10 +826,6 @@ collect_functions(Forms) ->
N = erl_syntax_lib:analyze_function(F),
{sets:add_element(N, Names),
dict:store(N, {F, []}, Defs)};
- rule ->
- N = erl_syntax_lib:analyze_rule(F),
- {sets:add_element(N, Names),
- dict:store(N, {F, []}, Defs)};
_ ->
{Names, Defs}
end
@@ -855,11 +840,6 @@ update_forms([F | Fs], Defs, Imports, Opts) ->
{F1, Fs1} = dict:fetch(N, Defs),
[F1 | lists:reverse(Fs1)] ++ update_forms(Fs, Defs, Imports,
Opts);
- rule ->
- N = erl_syntax_lib:analyze_rule(F),
- {F1, Fs1} = dict:fetch(N, Defs),
- [F1 | lists:reverse(Fs1)] ++ update_forms(Fs, Defs, Imports,
- Opts);
attribute ->
[update_attribute(F, Imports, Opts)
| update_forms(Fs, Defs, Imports, Opts)];
diff --git a/lib/syntax_tools/src/igor.erl b/lib/syntax_tools/src/igor.erl
index 0420508f2a..eac5af5540 100644
--- a/lib/syntax_tools/src/igor.erl
+++ b/lib/syntax_tools/src/igor.erl
@@ -1713,8 +1713,6 @@ transform(Tree, Env, St) ->
transform_function(Tree, Env, St);
implicit_fun ->
transform_implicit_fun(Tree, Env, St);
- rule ->
- transform_rule(Tree, Env, St);
record_expr ->
transform_record(Tree, Env, St);
record_index_expr ->
@@ -1778,27 +1776,6 @@ renaming_note(Name) ->
rename_atom(Node, Atom) ->
rewrite(Node, erl_syntax:atom(Atom)).
-%% Renaming Mnemosyne rules (just like function definitions)
-
-transform_rule(T, Env, St) ->
- {T1, St1} = default_transform(T, Env, St),
- F = erl_syntax_lib:analyze_rule(T1),
- {V, Text} = case (Env#code.map)(F) of
- F ->
- %% Not renamed
- {none, []};
- {Atom, _Arity} ->
- %% Renamed
- Cs = erl_syntax:rule_clauses(T1),
- N = rename_atom(
- erl_syntax:rule_name(T1),
- Atom),
- T2 = rewrite(T1,
- erl_syntax:rule(N, Cs)),
- {{value, T2}, renaming_note(Atom)}
- end,
- {maybe_modified(V, T1, 2, Text, Env), St1}.
-
%% Renaming "implicit fun" expressions (done quietly).
transform_implicit_fun(T, Env, St) ->
diff --git a/lib/test_server/src/erl2html2.erl b/lib/test_server/src/erl2html2.erl
index b9b45cda25..7cfaa2c325 100644
--- a/lib/test_server/src/erl2html2.erl
+++ b/lib/test_server/src/erl2html2.erl
@@ -22,11 +22,11 @@
%%%------------------------------------------------------------------
-module(erl2html2).
--export([convert/2, convert/3]).
+-export([convert/3, convert/4]).
-convert([], _Dest) -> % Fake clause.
+convert([], _Dest, _InclPath) -> % Fake clause.
ok;
-convert(File, Dest) ->
+convert(File, Dest, InclPath) ->
%% The generated code uses the BGCOLOR attribute in the
%% BODY tag, which wasn't valid until HTML 3.2. Also,
%% good HTML should either override all colour attributes
@@ -48,12 +48,12 @@ convert(File, Dest) ->
"</head>\n\n"
"<body bgcolor=\"white\" text=\"black\""
" link=\"blue\" vlink=\"purple\" alink=\"red\">\n"],
- convert(File, Dest, Header).
+ convert(File, Dest, InclPath, Header).
-convert(File, Dest, Header) ->
+convert(File, Dest, InclPath, Header) ->
%% statistics(runtime),
- case parse_file(File) of
+ case parse_file(File, InclPath) of
{ok,Functions} ->
%% {_, Time1} = statistics(runtime),
%% io:format("Parsed file in ~.2f Seconds.~n",[Time1/1000]),
@@ -92,8 +92,8 @@ convert(File, Dest, Header) ->
%%% Use expanded preprocessor directives if possible (epp). Only if
%%% this fails, fall back on using non-expanded code (epp_dodger).
-parse_file(File) ->
- case epp:open(File, [], []) of
+parse_file(File, InclPath) ->
+ case epp:open(File, InclPath, []) of
{ok,Epp} ->
try parse_preprocessed_file(Epp,File,false) of
Forms ->
@@ -145,13 +145,15 @@ parse_non_preprocessed_file(File) ->
parse_non_preprocessed_file(Epp, File, Location) ->
case epp_dodger:parse_form(Epp, Location) of
{ok,Tree,Location1} ->
- case erl_syntax:revert(Tree) of
+ try erl_syntax:revert(Tree) of
{function,L,F,A,[_|C]} ->
Clauses = [{clause,CL} || {clause,CL,_,_,_} <- C],
[{atom_to_list(F),A,L} | Clauses] ++
parse_non_preprocessed_file(Epp, File, Location1);
_ ->
parse_non_preprocessed_file(Epp, File, Location1)
+ catch
+ _:_ -> parse_non_preprocessed_file(Epp, File, Location1)
end;
{error,_E,Location1} ->
parse_non_preprocessed_file(Epp, File, Location1);
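The erl2html2 interface widened above now threads an include path through to epp:open/3. A minimal usage sketch (not part of the patch; the file names and include directories are hypothetical, and both calls are assumed to return ok on success, as the updated test suite expects for convert/4):

%% Illustrative sketch only -- names below are hypothetical.
render_module() ->
    InclPath = ["include", "../include"],        % passed on to epp:open/3
    ok = erl2html2:convert("src/my_mod.erl", "html/my_mod.erl.html", InclPath),
    ok = erl2html2:convert("src/my_mod.erl", "html/my_mod2.erl.html",
                           InclPath, "<html><body>").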
diff --git a/lib/test_server/src/test_server.erl b/lib/test_server/src/test_server.erl
index 9192a76a17..0e685a2d8a 100644
--- a/lib/test_server/src/test_server.erl
+++ b/lib/test_server/src/test_server.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -130,7 +130,8 @@ cover_compile(CoverInfo=#cover{app=App,excl=all,incl=Include,cross=Cross}) ->
io:fwrite("done\n\n",[]),
{ok,CoverInfo#cover{mods=Include}}
end;
-cover_compile(CoverInfo=#cover{app=App,excl=Exclude,incl=Include,cross=Cross}) ->
+cover_compile(CoverInfo=#cover{app=App,excl=Exclude,
+ incl=Include,cross=Cross}) ->
CrossMods = lists:flatmap(fun({_,M}) -> M end,Cross),
case code:lib_dir(App) of
{error,bad_name} ->
@@ -177,68 +178,35 @@ module_names(Beams) ->
do_cover_compile(Modules) ->
cover:start(),
- pmap1(fun(M) -> do_cover_compile1(M) end,lists:usort(Modules)),
+ Sticky = prepare_cover_compile(Modules,[]),
+ R = cover:compile_beam(Modules),
+ [warn_compile(Error) || Error <- R,element(1,Error)=/=ok],
+ [code:stick_mod(M) || M <- Sticky],
ok.
-do_cover_compile1(M) ->
+warn_compile({error,{Reason,Module}}) ->
+ io:fwrite("\nWARNING: Could not cover compile ~ts: ~p\n",
+ [Module,{error,Reason}]).
+
+%% Make sure all modules are loaded, and unstick them if they are sticky
+prepare_cover_compile([M|Ms],Sticky) ->
case {code:is_sticky(M),code:is_loaded(M)} of
{true,_} ->
code:unstick_mod(M),
- case cover:compile_beam(M) of
- {ok,_} ->
- ok;
- Error ->
- io:fwrite("\nWARNING: Could not cover compile ~w: ~p\n",
- [M,Error])
- end,
- code:stick_mod(M);
+ prepare_cover_compile(Ms,[M|Sticky]);
{false,false} ->
case code:load_file(M) of
{module,_} ->
- do_cover_compile1(M);
+ prepare_cover_compile([M|Ms],Sticky);
Error ->
- io:fwrite("\nWARNING: Could not load ~w: ~p\n",[M,Error])
+ io:fwrite("\nWARNING: Could not load ~w: ~p\n",[M,Error]),
+ prepare_cover_compile(Ms,Sticky)
end;
{false,_} ->
- case cover:compile_beam(M) of
- {ok,_} ->
- ok;
- Error ->
- io:fwrite("\nWARNING: Could not cover compile ~w: ~p\n",
- [M,Error])
- end
- end.
-
-pmap1(Fun,List) ->
- NTot = length(List),
- NProcs = erlang:system_info(schedulers) * 2,
- NPerProc = (NTot div NProcs) + 1,
-
- {[],Pids} =
- lists:foldr(
- fun(_,{L,Ps}) ->
- {L1,L2} = if length(L)>=NPerProc -> lists:split(NPerProc,L);
- true -> {L,[]} % last chunk
- end,
- {P,_Ref} =
- spawn_monitor(fun() ->
- exit(lists:map(Fun,L1))
- end),
- {L2,[P|Ps]}
- end,
- {List,[]},
- lists:seq(1,NProcs)),
- collect(Pids,[]).
-
-collect([],Acc) ->
- lists:append(Acc);
-collect([Pid|Pids],Acc) ->
- receive
- {'DOWN', _Ref, process, Pid, Result} ->
- %% collect(lists:delete(Pid,Pids),[Result|Acc])
- collect(Pids,[Result|Acc])
- end.
-
+ prepare_cover_compile(Ms,Sticky)
+ end;
+prepare_cover_compile([],Sticky) ->
+ Sticky.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% cover_analyse(Dir,#cover{level=Analyse,mods=Modules,stop=Stop) ->
@@ -268,45 +236,40 @@ collect([Pid|Pids],Acc) ->
%% after the test is completed.
cover_analyse(Dir,#cover{level=Analyse,mods=Modules,stop=Stop}) ->
io:fwrite(user, "Cover analysing... ", []),
- DetailsFun =
+ {ATFOk,ATFFail} =
case Analyse of
details ->
case cover:export(filename:join(Dir,"all.coverdata")) of
ok ->
- fun(M) ->
- OutFile = filename:join(Dir,
- atom_to_list(M) ++
- ".COVER.html"),
- case cover:analyse_to_file(M,OutFile,[html]) of
- {ok,_} ->
- {file,OutFile};
- Error ->
- Error
- end
- end;
+ {result,Ok1,Fail1} =
+ cover:analyse_to_file(Modules,[{outdir,Dir},html]),
+ {lists:map(fun(OutFile) ->
+ M = list_to_atom(
+ filename:basename(
+ filename:rootname(OutFile,
+ ".COVER.html")
+ )
+ ),
+ {M,{file,OutFile}}
+ end, Ok1),
+ lists:map(fun({Reason,M}) ->
+ {M,{error,Reason}}
+ end, Fail1)};
Error ->
- fun(_) -> Error end
+ {[],lists:map(fun(M) -> {M,Error} end, Modules)}
end;
overview ->
case cover:export(filename:join(Dir,"all.coverdata")) of
ok ->
- fun(_) -> undefined end;
+ {[],lists:map(fun(M) -> {M,undefined} end, Modules)};
Error ->
- fun(_) -> Error end
+ {[],lists:map(fun(M) -> {M,Error} end, Modules)}
end
end,
- R = pmap2(
- fun(M) ->
- case cover:analyse(M,module) of
- {ok,{M,{Cov,NotCov}}} ->
- {M,{Cov,NotCov,DetailsFun(M)}};
- Err ->
- io:fwrite(user,
- "\nWARNING: Analysis failed for ~w. Reason: ~p\n",
- [M,Err]),
- {M,Err}
- end
- end, Modules),
+ {result,AOk,AFail} = cover:analyse(Modules,module),
+ R0 = merge_analysis_results(AOk,ATFOk++ATFFail,[]) ++
+ [{M,{error,Reason}} || {Reason,M} <- AFail],
+ R = lists:sort(R0),
io:fwrite(user, "done\n\n", []),
case Stop of
@@ -319,19 +282,15 @@ cover_analyse(Dir,#cover{level=Analyse,mods=Modules,stop=Stop}) ->
end,
R.
-pmap2(Fun,List) ->
- Collector = self(),
- Pids = lists:map(fun(E) ->
- spawn(fun() ->
- Collector ! {res,self(),Fun(E)}
- end)
- end, List),
- lists:map(fun(Pid) ->
- receive
- {res,Pid,Res} ->
- Res
- end
- end, Pids).
+merge_analysis_results([{M,{Cov,NotCov}}|T],ATF,Acc) ->
+ case lists:keytake(M,1,ATF) of
+ {value,{_,R},ATF1} ->
+ merge_analysis_results(T,ATF1,[{M,{Cov,NotCov,R}}|Acc]);
+ false ->
+ merge_analysis_results(T,ATF,Acc)
+ end;
+merge_analysis_results([],_,Acc) ->
+ Acc.
do_cover_for_node(Node,CoverFunc) ->
do_cover_for_node(Node,CoverFunc,true).
@@ -779,7 +738,9 @@ do_call_end_conf(Starter,Mod,Func,Data,Conf,TVal) ->
EndConfApply =
fun() ->
timetrap(TVal),
- case catch apply(Mod,end_per_testcase,[Func,Conf]) of
+ case catch apply(Mod,
+ end_per_testcase,
+ [Func,Conf]) of
{'EXIT',Why} ->
timer:sleep(1),
group_leader() ! {printout,12,
@@ -817,7 +778,9 @@ spawn_fw_call(Mod,{init_per_testcase,Func},CurrConf,Pid,
Skip = {skip,{failed,{Mod,init_per_testcase,Why}}},
%% if init_per_testcase fails, the test case
%% should be skipped
- case catch do_end_tc_call(Mod,Func, {Pid,Skip,[CurrConf]}, Why) of
+ case catch do_end_tc_call(Mod,Func,
+ {Pid,Skip,[CurrConf]},
+ Why) of
{'EXIT',FwEndTCErr} ->
exit({fw_notify_done,end_tc,FwEndTCErr});
_ ->
@@ -984,12 +947,15 @@ run_test_case_eval(Mod, Func, Args0, Name, Ref, RunInit,
NewResult = do_end_tc_call(Mod,Func, {{error,Reason},[Conf]},
{fail,Reason}),
{{0,NewResult},Where,[]};
- Skip = {skip,_Reason} ->
- NewResult = do_end_tc_call(Mod,Func, {Skip,Args0}, Skip),
+ Skip = {SkipType,_Reason} when SkipType == skip;
+ SkipType == skipped ->
+ NewResult = do_end_tc_call(Mod,Func,
+ {Skip,Args0}, Skip),
{{0,NewResult},Where,[]};
AutoSkip = {auto_skip,_Reason} ->
%% special case where a conf case "pretends" to be skipped
- NewResult = do_end_tc_call(Mod,Func, {AutoSkip,Args0}, AutoSkip),
+ NewResult =
+ do_end_tc_call(Mod,Func, {AutoSkip,Args0}, AutoSkip),
{{0,NewResult},Where,[]}
end,
exit({Ref,Time,Value,Loc,Opts}).
@@ -1000,10 +966,12 @@ run_test_case_eval1(Mod, Func, Args, Name, RunInit, TCCallback) ->
set_tc_state(init_per_testcase, hd(Args)),
ensure_timetrap(Args),
case init_per_testcase(Mod, Func, Args) of
- Skip = {skip,Reason} ->
+ Skip = {SkipType,Reason} when SkipType == skip;
+ SkipType == skipped ->
Line = get_loc(),
Conf = [{tc_status,{skipped,Reason}}|hd(Args)],
- NewRes = do_end_tc_call(Mod,Func, {Skip,[Conf]}, Skip),
+ NewRes = do_end_tc_call(Mod,Func,
+ {Skip,[Conf]}, Skip),
{{0,NewRes},Line,[]};
{skip_and_save,Reason,SaveCfg} ->
Line = get_loc(),
@@ -1021,11 +989,12 @@ run_test_case_eval1(Mod, Func, Args, Name, RunInit, TCCallback) ->
{{0,NewRes},[{Mod,Func}],[]};
{ok,NewConf} ->
%% call user callback function if defined
- NewConf1 = user_callback(TCCallback, Mod, Func, init, NewConf),
+ NewConf1 =
+ user_callback(TCCallback, Mod, Func, init, NewConf),
%% save current state in controller loop
set_tc_state(tc, NewConf1),
%% execute the test case
- {{T,Return},Loc} = {ts_tc(Mod, Func, [NewConf1]),get_loc()},
+ {{T,Return},Loc} = {ts_tc(Mod,Func,[NewConf1]), get_loc()},
{EndConf,TSReturn,FWReturn} =
case Return of
{E,TCError} when E=='EXIT' ; E==failed ->
@@ -1041,30 +1010,39 @@ run_test_case_eval1(Mod, Func, Args, Name, RunInit, TCCallback) ->
{[{tc_status,{skipped,Why}},
{save_config,SaveCfg}|NewConf1],
Skip,Skip};
- {skip,Why} ->
- {[{tc_status,{skipped,Why}}|NewConf1],Return,Return};
+ {SkipType,Why} when SkipType == skip;
+ SkipType == skipped ->
+ {[{tc_status,{skipped,Why}}|NewConf1],Return,
+ Return};
_ ->
{[{tc_status,ok}|NewConf1],Return,ok}
end,
%% call user callback function if defined
- EndConf1 = user_callback(TCCallback, Mod, Func, 'end', EndConf),
+ EndConf1 =
+ user_callback(TCCallback, Mod, Func, 'end', EndConf),
%% update current state in controller loop
{FWReturn1,TSReturn1,EndConf2} =
case end_per_testcase(Mod, Func, EndConf1) of
SaveCfg1={save_config,_} ->
- {FWReturn,TSReturn,[SaveCfg1|lists:keydelete(save_config,1,
- EndConf1)]};
+ {FWReturn,TSReturn,
+ [SaveCfg1|lists:keydelete(save_config,1,
+ EndConf1)]};
{fail,ReasonToFail} ->
%% user has failed the testcase
- fw_error_notify(Mod, Func, EndConf1, ReasonToFail),
- {{error,ReasonToFail},{failed,ReasonToFail},EndConf1};
- {failed,{_,end_per_testcase,_}} = Failure when FWReturn == ok ->
+ fw_error_notify(Mod, Func, EndConf1,
+ ReasonToFail),
+ {{error,ReasonToFail},
+ {failed,ReasonToFail},
+ EndConf1};
+ {failed,{_,end_per_testcase,_}} = Failure when
+ FWReturn == ok ->
%% unexpected termination in end_per_testcase
%% report this as the result to the framework
{Failure,TSReturn,EndConf1};
_ ->
- %% test case result should be reported to framework
- %% no matter the status of end_per_testcase
+ %% test case result should be reported to
+ %% framework no matter the status of
+ %% end_per_testcase
{FWReturn,TSReturn,EndConf1}
end,
%% clear current state in controller loop
@@ -1131,7 +1109,8 @@ process_return_val([Return], M,F,A, Loc, Final) when is_list(Return) ->
ReturnTags = [skip,skip_and_save,save_config,comment,return_group_result],
%% check if all elements in the list are valid end conf return value tuples
case lists:all(fun(Val) when is_tuple(Val) ->
- lists:any(fun(T) -> T == element(1, Val) end, ReturnTags);
+ lists:any(fun(T) -> T == element(1, Val) end,
+ ReturnTags);
(ok) ->
true;
(_) ->
@@ -1165,14 +1144,19 @@ process_return_val1([Failed={E,TCError}|_], M,F,A=[Args], Loc, _, SaveOpts)
NewReturn ->
{NewReturn,SaveOpts}
end;
-process_return_val1([SaveCfg={save_config,_}|Opts], M,F,[Args], Loc, Final, SaveOpts) ->
+process_return_val1([SaveCfg={save_config,_}|Opts], M,F,[Args],
+ Loc, Final, SaveOpts) ->
process_return_val1(Opts, M,F,[[SaveCfg|Args]], Loc, Final, SaveOpts);
-process_return_val1([{skip_and_save,Why,SaveCfg}|Opts], M,F,[Args], Loc, _, SaveOpts) ->
- process_return_val1(Opts, M,F,[[{save_config,SaveCfg}|Args]], Loc, {skip,Why}, SaveOpts);
-process_return_val1([GR={return_group_result,_}|Opts], M,F,A, Loc, Final, SaveOpts) ->
+process_return_val1([{skip_and_save,Why,SaveCfg}|Opts], M,F,[Args],
+ Loc, _, SaveOpts) ->
+ process_return_val1(Opts, M,F,[[{save_config,SaveCfg}|Args]],
+ Loc, {skip,Why}, SaveOpts);
+process_return_val1([GR={return_group_result,_}|Opts], M,F,A,
+ Loc, Final, SaveOpts) ->
process_return_val1(Opts, M,F,A, Loc, Final, [GR|SaveOpts]);
-process_return_val1([RetVal={Tag,_}|Opts], M,F,A, Loc, _, SaveOpts) when Tag==skip;
- Tag==comment ->
+process_return_val1([RetVal={Tag,_}|Opts], M,F,A,
+ Loc, _, SaveOpts) when Tag==skip;
+ Tag==comment ->
process_return_val1(Opts, M,F,A, Loc, RetVal, SaveOpts);
process_return_val1([_|Opts], M,F,A, Loc, Final, SaveOpts) ->
process_return_val1(Opts, M,F,A, Loc, Final, SaveOpts);
@@ -1186,7 +1170,8 @@ process_return_val1([], M,F,A, _Loc, Final, SaveOpts) ->
user_callback(undefined, _, _, _, Args) ->
Args;
-user_callback({CBMod,CBFunc}, Mod, Func, InitOrEnd, [Args]) when is_list(Args) ->
+user_callback({CBMod,CBFunc}, Mod, Func, InitOrEnd,
+ [Args]) when is_list(Args) ->
case catch apply(CBMod, CBFunc, [InitOrEnd,Mod,Func,Args]) of
Args1 when is_list(Args1) ->
[Args1];
@@ -1778,7 +1763,8 @@ timetrap(Timeout0, TimeToReport0, TCPid, MultAndScale = {Multiplier,Scale}) ->
put(test_server_timetraps,[{Handle,TCPid,{TimeToReport,Scale}}]);
List ->
List1 = lists:delete({infinity,TCPid,{infinity,false}}, List),
- put(test_server_timetraps,[{Handle,TCPid,{TimeToReport,Scale}}|List1])
+ put(test_server_timetraps,[{Handle,TCPid,
+ {TimeToReport,Scale}}|List1])
end,
Handle.
@@ -1837,7 +1823,9 @@ time_ms(Ms, _, _) when is_integer(Ms) -> Ms;
time_ms(infinity, _, _) -> infinity;
time_ms(Fun, TCPid, MultAndScale) when is_function(Fun) ->
time_ms_apply(Fun, TCPid, MultAndScale);
-time_ms({M,F,A}=MFA, TCPid, MultAndScale) when is_atom(M), is_atom(F), is_list(A) ->
+time_ms({M,F,A}=MFA, TCPid, MultAndScale) when is_atom(M),
+ is_atom(F),
+ is_list(A) ->
time_ms_apply(MFA, TCPid, MultAndScale);
time_ms(Other, _, _) -> exit({invalid_time_format,Other}).
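As a worked illustration of the merging step introduced above (values are made up, not from the patch): cover:analyse(Modules, module) now returns {result,Ok,Fail}, and merge_analysis_results/3 pairs each {M,{Cov,NotCov}} entry in Ok with the matching analyse_to_file outcome. The sketch below mirrors that pairing with proplists:get_value/2:

%% Illustrative sketch only -- made-up data, not part of the patch.
merge_example() ->
    AOk = [{m1,{10,2}}, {m2,{7,0}}],                          % Ok list from cover:analyse/2
    ATF = [{m1,{file,"m1.COVER.html"}}, {m2,{error,enoent}}], % analyse_to_file outcomes
    [{M, {Cov, NotCov, proplists:get_value(M, ATF)}} || {M,{Cov,NotCov}} <- AOk].
    %% -> [{m1,{10,2,{file,"m1.COVER.html"}}}, {m2,{7,0,{error,enoent}}}]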
diff --git a/lib/test_server/src/test_server_ctrl.erl b/lib/test_server/src/test_server_ctrl.erl
index af8921fe75..488f38d05d 100644
--- a/lib/test_server/src/test_server_ctrl.erl
+++ b/lib/test_server/src/test_server_ctrl.erl
@@ -1927,15 +1927,20 @@ html_possibly_convert(Src, SrcInfo, Dest) ->
{ok,DestInfo} when DestInfo#file_info.mtime >= SrcInfo#file_info.mtime ->
ok; % dest file up to date
_ ->
+ InclPath = case application:get_env(test_server, include) of
+ {ok,Incls} -> Incls;
+ _ -> []
+ end,
+
OutDir = get(test_server_log_dir_base),
case test_server_sup:framework_call(get_html_wrapper,
["Module "++Src,false,
OutDir,undefined,
encoding(Src)], "") of
Empty when (Empty == "") ; (element(2,Empty) == "") ->
- erl2html2:convert(Src, Dest);
+ erl2html2:convert(Src, Dest, InclPath);
{_,Header,_} ->
- erl2html2:convert(Src, Dest, Header)
+ erl2html2:convert(Src, Dest, InclPath, Header)
end
end.
diff --git a/lib/test_server/src/ts_make.erl b/lib/test_server/src/ts_make.erl
index 8727f7ebfe..9cb77ecb12 100644
--- a/lib/test_server/src/ts_make.erl
+++ b/lib/test_server/src/ts_make.erl
@@ -67,7 +67,17 @@ get_port_data(Port, Last0, Complete0) ->
end.
update_last([C|Rest], Line, true) ->
- io:put_chars(list_to_binary(Line)), %% Utf-8 list to utf-8 binary
+ try
+ %% UTF-8 list to UTF-8 binary
+ %% (we assume the port delivers UTF-8 bytes)
+ io:put_chars(list_to_binary(Line))
+ catch
+ error:badarg ->
+ %% io:put_chars/1 failed with badarg;
+ %% this likely means there were Unicode code points
+ %% in our byte buffer (e.g. a warning from gcc containing åäö)
+ io:put_chars(unicode:characters_to_binary(Line))
+ end,
io:nl(),
update_last([C|Rest], [], false);
update_last([$\r|Rest], Result, Complete) ->
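A short sketch of why the try/catch added above is needed (illustrative only, not part of the patch): list_to_binary/1 accepts only bytes (0..255), so a line decoded into Unicode code points, e.g. a gcc warning containing åäö, raises badarg; unicode:characters_to_binary/1 then encodes those code points as UTF-8 before printing:

%% Illustrative sketch only.
to_printable(Line) ->
    try
        list_to_binary(Line)                   % works for plain byte lists
    catch
        error:badarg ->
            unicode:characters_to_binary(Line) % handles code points > 255
    end.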
diff --git a/lib/test_server/test/erl2html2_SUITE.erl b/lib/test_server/test/erl2html2_SUITE.erl
index 37c2b74d8e..908985c879 100644
--- a/lib/test_server/test/erl2html2_SUITE.erl
+++ b/lib/test_server/test/erl2html2_SUITE.erl
@@ -161,7 +161,7 @@ convert_module(Mod,Config) ->
Src = filename:join(DataDir,Mod++".erl"),
Dst = filename:join(PrivDir,Mod++".erl.html"),
io:format("<a href=\"~s\">~s</a>\n",[Src,filename:basename(Src)]),
- ok = erl2html2:convert(Src, Dst, "<html><body>"),
+ ok = erl2html2:convert(Src, Dst, [], "<html><body>"),
io:format("<a href=\"~s\">~s</a>\n",[Dst,filename:basename(Dst)]),
{Src,Dst}.
diff --git a/lib/tools/doc/src/cover.xml b/lib/tools/doc/src/cover.xml
index 07ffa65e3d..914baa7977 100644
--- a/lib/tools/doc/src/cover.xml
+++ b/lib/tools/doc/src/cover.xml
@@ -5,7 +5,7 @@
<header>
<copyright>
<year>2001</year>
- <year>2013</year>
+ <year>2015</year>
<holder>Ericsson AB, All Rights Reserved</holder>
</copyright>
<legalnotice>
@@ -138,17 +138,18 @@
</desc>
</func>
<func>
- <name>compile(ModFile) -> Result</name>
- <name>compile(ModFile, Options) -> Result</name>
- <name>compile_module(ModFile) -> Result</name>
- <name>compile_module(ModFile, Options) -> Result</name>
- <fsummary>Compile a module for Cover analysis.</fsummary>
+ <name>compile(ModFiles) -> Result | [Result]</name>
+ <name>compile(ModFiles, Options) -> Result | [Result]</name>
+ <name>compile_module(ModFiles) -> Result | [Result]</name>
+ <name>compile_module(ModFiles, Options) -> Result | [Result]</name>
+ <fsummary>Compile one or more modules for Cover analysis.</fsummary>
<type>
+ <v>ModFiles = ModFile | [ModFile]</v>
<v>ModFile = Module | File</v>
<v>&nbsp;Module = atom()</v>
<v>&nbsp;File = string()</v>
<v>Options = [Option]</v>
- <v>&nbsp;Option = {i,Dir} | {d,Macro} | {d,Macro,Value}</v>
+ <v>&nbsp;Option = {i,Dir} | {d,Macro} | {d,Macro,Value} | export_all</v>
<d>See <c>compile:file/2.</c></d>
<v>Result = {ok,Module} | {error,File} | {error,not_main_node}</v>
</type>
@@ -165,6 +166,9 @@
returns <c>{ok,Module}</c>. Otherwise the function returns
<c>{error,File}</c>. Errors and warnings are printed as they
occur.</p>
+ <p>If a list of <c>ModFiles</c> is given as input, a list
+ of <c>Result</c> will be returned. The order of the returned
+ list is undefined.</p>
<p>Note that the internal database is (re-)initiated during
the compilation, meaning any previously collected coverage data
for the module will be lost.</p>
@@ -194,9 +198,10 @@
</desc>
</func>
<func>
- <name>compile_beam(ModFile) -> Result</name>
- <fsummary>Compile a module for Cover analysis, using an existing beam.</fsummary>
+ <name>compile_beam(ModFiles) -> Result | [Result]</name>
+ <fsummary>Compile one or more modules for Cover analysis, using existing beam(s).</fsummary>
<type>
+ <v>ModFiles = ModFile | [ModFile]</v>
<v>ModFile = Module | BeamFile</v>
<v>&nbsp;Module = atom()</v>
<v>&nbsp;BeamFile = string()</v>
@@ -229,6 +234,9 @@
returned.</p>
<p><c>{error,BeamFile}</c> is returned if the compiled code
can not be loaded on the node.</p>
+ <p>If a list of <c>ModFiles</c> is given as input, a list
+ of <c>Result</c> will be returned. The order of the returned
+ list is undefined.</p>
</desc>
</func>
<func>
@@ -251,16 +259,21 @@
</desc>
</func>
<func>
- <name>analyse(Module) -> {ok,Answer} | {error,Error}</name>
- <name>analyse(Module, Analysis) -> {ok,Answer} | {error,Error}</name>
- <name>analyse(Module, Level) -> {ok,Answer} | {error,Error}</name>
- <name>analyse(Module, Analysis, Level) -> {ok,Answer} | {error,Error}</name>
- <fsummary>Analyse a Cover compiled module.</fsummary>
+ <name>analyse() -> {result,Ok,Fail} | {error,not_main_node}</name>
+ <name>analyse(Modules) -> OneResult | {result,Ok,Fail} | {error,not_main_node}</name>
+ <name>analyse(Analysis) -> {result,Ok,Fail} | {error,not_main_node}</name>
+ <name>analyse(Level) -> {result,Ok,Fail} | {error,not_main_node}</name>
+ <name>analyse(Modules, Analysis) -> OneResult | {result,Ok,Fail} | {error,not_main_node}</name>
+ <name>analyse(Modules, Level) -> OneResult | {result,Ok,Fail} | {error,not_main_node}</name>
+ <name>analyse(Analysis, Level) -> {result,Ok,Fail} | {error,not_main_node}</name>
+ <name>analyse(Modules, Analysis, Level) -> OneResult | {result,Ok,Fail} | {error,not_main_node}</name>
+ <fsummary>Analyse one or more Cover compiled modules.</fsummary>
<type>
- <v>Module = atom()</v>
+ <v>Modules = Module | [Module]</v>
+ <v>Module = atom()</v>
<v>Analysis = coverage | calls</v>
<v>Level = line | clause | function | module</v>
- <v>Answer = {Module,Value} | [{Item,Value}]</v>
+ <v>OneResult = {ok,{Module,Value}} | {ok,[{Item,Value}]} | {error, Error}</v>
<v>&nbsp;Item = Line | Clause | Function</v>
<v>&nbsp;&nbsp;Line = {M,N}</v>
<v>&nbsp;&nbsp;Clause = {M,F,A,C}</v>
@@ -269,49 +282,67 @@
<v>&nbsp;&nbsp;&nbsp;N = A = C = integer()</v>
<v>&nbsp;Value = {Cov,NotCov} | Calls</v>
<v>&nbsp;&nbsp;Cov = NotCov = Calls = integer()</v>
- <v>Error = {not_cover_compiled,Module} | not_main_node</v>
+ <v>&nbsp;Error = {not_cover_compiled,Module}</v>
+ <v>Ok = [{Module,Value}] | [{Item,Value}]</v>
+ <v>Fail = [Error]</v>
</type>
<desc>
- <p>Performs analysis of a Cover compiled module <c>Module</c>, as
+ <p>Performs analysis of one or more Cover compiled modules, as
specified by <c>Analysis</c> and <c>Level</c> (see above), by
examining the contents of the internal database.</p>
<p><c>Analysis</c> defaults to <c>coverage</c> and <c>Level</c>
defaults to <c>function</c>.</p>
- <p>If <c>Module</c> is not Cover compiled, the function returns
- <c>{error,{not_cover_compiled,Module}}</c>.</p>
- <p>HINT: It is possible to issue multiple analyse_to_file commands at
- the same time. </p>
+ <p>If <c>Modules</c> is an atom (one module), the return value
+ is <c>OneResult</c>; otherwise the return value is
+ <c>{result,Ok,Fail}</c>.</p>
+ <p>If <c>Modules</c> is not given, all modules that have data
+ in the cover data table are analysed. Note that this
+ includes both cover compiled modules and imported
+ modules.</p>
+ <p>If a given module is not Cover compiled, this is indicated
+ by the error reason <c>{not_cover_compiled,Module}</c>.</p>
</desc>
</func>
<func>
- <name>analyse_to_file(Module) -> </name>
- <name>analyse_to_file(Module,Options) -> </name>
- <name>analyse_to_file(Module, OutFile) -> </name>
- <name>analyse_to_file(Module, OutFile, Options) -> {ok,OutFile} | {error,Error}</name>
- <fsummary>Detailed coverage analysis of a Cover compiled module.</fsummary>
+ <name>analyse_to_file() -> {result,Ok,Fail} | {error,not_main_node}</name>
+ <name>analyse_to_file(Modules) -> Answer | {result,Ok,Fail} | {error,not_main_node}</name>
+ <name>analyse_to_file(Options) -> {result,Ok,Fail} | {error,not_main_node}</name>
+ <name>analyse_to_file(Modules,Options) -> Answer | {result,Ok,Fail} | {error,not_main_node}</name>
+ <fsummary>Detailed coverage analysis of one or more Cover compiled modules.</fsummary>
<type>
+ <v>Modules = Module | [Module]</v>
<v>Module = atom()</v>
- <v>OutFile = string()</v>
+ <v>OutFile = OutDir = string()</v>
<v>Options = [Option]</v>
- <v>Option = html</v>
- <v>Error = {not_cover_compiled,Module} | {file,File,Reason} | no_source_code_found | not_main_node</v>
+ <v>Option = html | {outfile,OutFile} | {outdir,OutDir}</v>
+ <v>Answer = {ok,OutFile} | {error,Error}</v>
+ <v>Ok = [OutFile]</v>
+ <v>Fail = [Error]</v>
+ <v>Error = {not_cover_compiled,Module} | {file,File,Reason} | {no_source_code_found,Module}</v>
<v>&nbsp;File = string()</v>
<v>&nbsp;Reason = term()</v>
</type>
<desc>
- <p>Makes a copy <c>OutFile</c> of the source file for a module
- <c>Module</c>, where it for each executable line is specified
+ <p>Makes copies of the source file for the given modules,
+ where, for each executable line, it is specified
how many times it has been executed.</p>
<p>The output file <c>OutFile</c> defaults to
<c>Module.COVER.out</c>, or <c>Module.COVER.html</c> if the
option <c>html</c> was used.</p>
- <p>If <c>Module</c> is not Cover compiled, the function returns
- <c>{error,{not_cover_compiled,Module}}</c>.</p>
+ <p>If <c>Modules</c> is an atom (one module), the return value
+ is <c>Answer</c>; otherwise the return value is
+ <c>{result,Ok,Fail}</c>.</p>
+ <p>If <c>Modules</c> is not given, all modules that have data
+ in the cover data table are analysed. Note that this
+ includes both cover compiled modules and imported
+ modules.</p>
+ <p>If a module is not Cover compiled, this is indicated by the
+ error reason <c>{not_cover_compiled,Module}</c>.</p>
<p>If the source file and/or the output file cannot be opened using
<c>file:open/2</c>, the function returns
<c>{error,{file,File,Reason}}</c> where <c>File</c> is the file
name and <c>Reason</c> is the error reason.</p>
- <p>If the module was cover compiled from the <c>.beam</c>
+ <p>If a module was cover compiled from the <c>.beam</c>
file, i.e. using <c>compile_beam/1</c> or
<c>compile_beam_directory/0,1</c>, it is assumed that the
source code can be found in the same directory as the
@@ -322,10 +353,8 @@
joining <c>../src</c> and the tail of the compiled path
below a trailing <c>src</c> component, then the compiled
path itself.
- If no source code is found,
- <c>{error,no_source_code_found}</c> is returned.</p>
- <p>HINT: It is possible to issue multiple analyse_to_file commands at
- the same time. </p>
+ If no source code is found, this is indicated by the error reason
+ <c>{no_source_code_found,Module}</c>.</p>
</desc>
</func>
<func>
@@ -339,7 +368,7 @@
<v>OutFile = string()</v>
<v>Options = [Option]</v>
<v>Option = html</v>
- <v>Error = {not_cover_compiled,Module} | {file,File,Reason} | no_source_code_found | not_main_node</v>
+ <v>Error = {not_cover_compiled,Module} | {file,File,Reason} | {no_source_code_found,Module} | not_main_node</v>
<v>&nbsp;File = string()</v>
<v>&nbsp;Reason = term()</v>
</type>
diff --git a/lib/tools/src/cover.erl b/lib/tools/src/cover.erl
index 31754015f7..6c32c47069 100644
--- a/lib/tools/src/cover.erl
+++ b/lib/tools/src/cover.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2001-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2001-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -77,8 +77,11 @@
compile/1, compile/2, compile_module/1, compile_module/2,
compile_directory/0, compile_directory/1, compile_directory/2,
compile_beam/1, compile_beam_directory/0, compile_beam_directory/1,
- analyse/1, analyse/2, analyse/3, analyze/1, analyze/2, analyze/3,
+ analyse/0, analyse/1, analyse/2, analyse/3,
+ analyze/0, analyze/1, analyze/2, analyze/3,
+ analyse_to_file/0,
analyse_to_file/1, analyse_to_file/2, analyse_to_file/3,
+ analyze_to_file/0,
analyze_to_file/1, analyze_to_file/2, analyze_to_file/3,
async_analyse_to_file/1,async_analyse_to_file/2,
async_analyse_to_file/3, async_analyze_to_file/1,
@@ -109,6 +112,7 @@
line = '_' % integer()
}).
-define(BUMP_REC_NAME,bump).
+-define(CHUNK_SIZE, 20000).
-record(vars, {module, % atom() Module name
@@ -181,10 +185,11 @@ start(Node) when is_atom(Node) ->
start(Nodes) ->
call({start_nodes,remove_myself(Nodes,[])}).
-%% compile(ModFile) ->
-%% compile(ModFile, Options) ->
-%% compile_module(ModFile) -> Result
-%% compile_module(ModFile, Options) -> Result
+%% compile(ModFiles) ->
+%% compile(ModFiles, Options) ->
+%% compile_module(ModFiles) -> Result
+%% compile_module(ModFiles, Options) -> Result
+%% ModFiles = ModFile | [ModFile]
%% ModFile = Module | File
%% Module = atom()
%% File = string()
@@ -198,18 +203,27 @@ compile(ModFile, Options) ->
compile_module(ModFile) when is_atom(ModFile);
is_list(ModFile) ->
compile_module(ModFile, []).
-compile_module(Module, Options) when is_atom(Module), is_list(Options) ->
- compile_module(atom_to_list(Module), Options);
-compile_module(File, Options) when is_list(File), is_list(Options) ->
- WithExt = case filename:extension(File) of
- ".erl" ->
- File;
- _ ->
- File++".erl"
- end,
- AbsFile = filename:absname(WithExt),
- [R] = compile_modules([AbsFile], Options),
- R.
+compile_module(ModFile, Options) when is_atom(ModFile);
+ is_list(ModFile), is_integer(hd(ModFile)) ->
+ [R] = compile_module([ModFile], Options),
+ R;
+compile_module(ModFiles, Options) when is_list(Options) ->
+ AbsFiles =
+ [begin
+ File =
+ case ModFile of
+ _ when is_atom(ModFile) -> atom_to_list(ModFile);
+ _ when is_list(ModFile) -> ModFile
+ end,
+ WithExt = case filename:extension(File) of
+ ".erl" ->
+ File;
+ _ ->
+ File++".erl"
+ end,
+ filename:absname(WithExt)
+ end || ModFile <- ModFiles],
+ compile_modules(AbsFiles, Options).
%% compile_directory() ->
%% compile_directory(Dir) ->
@@ -240,13 +254,14 @@ compile_directory(Dir, Options) when is_list(Dir), is_list(Options) ->
compile_modules(Files,Options) ->
Options2 = filter_options(Options),
- compile_modules(Files,Options2,[]).
+ %% compile_modules(Files,Options2,[]).
+ call({compile, Files, Options2}).
-compile_modules([File|Files], Options, Result) ->
- R = call({compile, File, Options}),
- compile_modules(Files,Options,[R|Result]);
-compile_modules([],_Opts,Result) ->
- lists:reverse(Result).
+%% compile_modules([File|Files], Options, Result) ->
+%% R = call({compile, File, Options}),
+%% compile_modules(Files,Options,[R|Result]);
+%% compile_modules([],_Opts,Result) ->
+%% lists:reverse(Result).
filter_options(Options) ->
lists:filter(fun(Option) ->
@@ -264,30 +279,17 @@ filter_options(Options) ->
%% ModFile - see compile/1
%% Result - see compile/1
%% Reason = non_existing | already_cover_compiled
-compile_beam(Module) when is_atom(Module) ->
- case code:which(Module) of
- non_existing ->
+compile_beam(ModFile0) when is_atom(ModFile0);
+ is_list(ModFile0), is_integer(hd(ModFile0)) ->
+ case compile_beams([ModFile0]) of
+ [{error,{non_existing,_}}] ->
+ %% Backwards compatibility
{error,non_existing};
- ?TAG ->
- compile_beam(Module,?TAG);
- File ->
- compile_beam(Module,File)
+ [Result] ->
+ Result
end;
-compile_beam(File) when is_list(File) ->
- {WithExt,WithoutExt}
- = case filename:rootname(File,".beam") of
- File ->
- {File++".beam",File};
- Rootname ->
- {File,Rootname}
- end,
- AbsFile = filename:absname(WithExt),
- Module = list_to_atom(filename:basename(WithoutExt)),
- compile_beam(Module,AbsFile).
-
-compile_beam(Module,File) ->
- call({compile_beam,Module,File}).
-
+compile_beam(ModFiles) when is_list(ModFiles) ->
+ compile_beams(ModFiles).
%% compile_beam_directory(Dir) -> [Result] | {error,Reason}
@@ -312,19 +314,52 @@ compile_beam_directory(Dir) when is_list(Dir) ->
Error
end.
-compile_beams(Files) ->
- compile_beams(Files,[]).
-compile_beams([File|Files],Result) ->
- R = compile_beam(File),
- compile_beams(Files,[R|Result]);
-compile_beams([],Result) ->
- lists:reverse(Result).
+compile_beams(ModFiles0) ->
+ ModFiles = get_mods_and_beams(ModFiles0,[]),
+ call({compile_beams,ModFiles}).
-
-%% analyse(Module) ->
-%% analyse(Module, Analysis) ->
-%% analyse(Module, Level) ->
-%% analyse(Module, Analysis, Level) -> {ok,Answer} | {error,Error}
+get_mods_and_beams([Module|ModFiles],Acc) when is_atom(Module) ->
+ case code:which(Module) of
+ non_existing ->
+ get_mods_and_beams(ModFiles,[{error,{non_existing,Module}}|Acc]);
+ File ->
+ get_mods_and_beams([{Module,File}|ModFiles],Acc)
+ end;
+get_mods_and_beams([File|ModFiles],Acc) when is_list(File) ->
+ {WithExt,WithoutExt}
+ = case filename:rootname(File,".beam") of
+ File ->
+ {File++".beam",File};
+ Rootname ->
+ {File,Rootname}
+ end,
+ AbsFile = filename:absname(WithExt),
+ Module = list_to_atom(filename:basename(WithoutExt)),
+ get_mods_and_beams([{Module,AbsFile}|ModFiles],Acc);
+get_mods_and_beams([{Module,File}|ModFiles],Acc) ->
+ %% Check for duplicates
+ case lists:keyfind(Module,2,Acc) of
+ {ok,Module,File} ->
+ %% Duplicate, but same file so ignore
+ get_mods_and_beams(ModFiles,Acc);
+ {ok,Module,_OtherFile} ->
+ %% Duplicate and different file - error
+ get_mods_and_beams(ModFiles,[{error,{duplicate,Module}}|Acc]);
+ _ ->
+ get_mods_and_beams(ModFiles,[{ok,Module,File}|Acc])
+ end;
+get_mods_and_beams([],Acc) ->
+ lists:reverse(Acc).
+
+
+%% analyse(Modules) ->
+%% analyse(Analysis) ->
+%% analyse(Level) ->
+%% analyse(Modules, Analysis) ->
+%% analyse(Modules, Level) ->
+%% analyse(Analysis, Level)
+%% analyse(Modules, Analysis, Level) -> {ok,Answer} | {error,Error}
+%% Modules = Module | [Module]
%% Module = atom()
%% Analysis = coverage | calls
%% Level = line | clause | function | module
@@ -337,48 +372,74 @@ compile_beams([],Result) ->
%% N = A = C = integer()
%% Value = {Cov,NotCov} | Calls
%% Cov = NotCov = Calls = integer()
-%% Error = {not_cover_compiled,Module}
+%% Error = {not_cover_compiled,Module} | not_main_node
+-define(is_analysis(__A__),
+ (__A__=:=coverage orelse __A__=:=calls)).
+-define(is_level(__L__),
+ (__L__=:=line orelse __L__=:=clause orelse
+ __L__=:=function orelse __L__=:=module)).
+analyse() ->
+ analyse('_').
+
+analyse(Analysis) when ?is_analysis(Analysis) ->
+ analyse('_', Analysis);
+analyse(Level) when ?is_level(Level) ->
+ analyse('_', Level);
analyse(Module) ->
analyse(Module, coverage).
-analyse(Module, Analysis) when Analysis=:=coverage; Analysis=:=calls ->
+
+analyse(Analysis, Level) when ?is_analysis(Analysis) andalso
+ ?is_level(Level) ->
+ analyse('_', Analysis, Level);
+analyse(Module, Analysis) when ?is_analysis(Analysis) ->
analyse(Module, Analysis, function);
-analyse(Module, Level) when Level=:=line; Level=:=clause; Level=:=function;
- Level=:=module ->
+analyse(Module, Level) when ?is_level(Level) ->
analyse(Module, coverage, Level).
-analyse(Module, Analysis, Level) when is_atom(Module),
- Analysis=:=coverage; Analysis=:=calls,
- Level=:=line; Level=:=clause;
- Level=:=function; Level=:=module ->
+
+analyse(Module, Analysis, Level) when ?is_analysis(Analysis),
+ ?is_level(Level) ->
call({{analyse, Analysis, Level}, Module}).
+analyze() -> analyse().
analyze(Module) -> analyse(Module).
analyze(Module, Analysis) -> analyse(Module, Analysis).
analyze(Module, Analysis, Level) -> analyse(Module, Analysis, Level).
-%% analyse_to_file(Module) ->
-%% analyse_to_file(Module, Options) ->
-%% analyse_to_file(Module, OutFile) ->
-%% analyse_to_file(Module, OutFile, Options) -> {ok,OutFile} | {error,Error}
+%% analyse_to_file() ->
+%% analyse_to_file(Modules) ->
+%% analyse_to_file(Modules, Options) ->
+%% Modules = Module | [Module]
%% Module = atom()
%% OutFile = string()
%% Options = [Option]
-%% Option = html
+%% Option = html | {outfile,filename()} | {outdir,dirname()}
%% Error = {not_cover_compiled,Module} | no_source_code_found |
%% {file,File,Reason}
%% File = string()
%% Reason = term()
-analyse_to_file(Module) when is_atom(Module) ->
- analyse_to_file(Module, outfilename(Module,[]), []).
-analyse_to_file(Module, []) when is_atom(Module) ->
- analyse_to_file(Module, outfilename(Module,[]), []);
-analyse_to_file(Module, Options) when is_atom(Module),
- is_list(Options), is_atom(hd(Options)) ->
- analyse_to_file(Module, outfilename(Module,Options), Options);
-analyse_to_file(Module, OutFile) when is_atom(Module), is_list(OutFile) ->
- analyse_to_file(Module, OutFile, []).
-analyse_to_file(Module, OutFile, Options) when is_atom(Module), is_list(OutFile) ->
- call({{analyse_to_file, OutFile, Options}, Module}).
-
+%%
+%% Kept for backwards compatibility:
+%% analyse_to_file(Modules, OutFile) ->
+%% analyse_to_file(Modules, OutFile, Options) -> {ok,OutFile} | {error,Error}
+analyse_to_file() ->
+ analyse_to_file('_').
+analyse_to_file(Arg) ->
+ case is_options(Arg) of
+ true ->
+ analyse_to_file('_',Arg);
+ false ->
+ analyse_to_file(Arg,[])
+ end.
+analyse_to_file(Module, OutFile) when is_list(OutFile), is_integer(hd(OutFile)) ->
+ %% Kept for backwards compatibility
+ analyse_to_file(Module, [{outfile,OutFile}]);
+analyse_to_file(Module, Options) when is_list(Options) ->
+ call({{analyse_to_file, Options}, Module}).
+analyse_to_file(Module, OutFile, Options) when is_list(OutFile) ->
+ %% Kept for backwards compatibility
+ analyse_to_file(Module,[{outfile,OutFile}|Options]).
+
+analyze_to_file() -> analyse_to_file().
analyze_to_file(Module) -> analyse_to_file(Module).
analyze_to_file(Module, OptOrOut) -> analyse_to_file(Module, OptOrOut).
analyze_to_file(Module, OutFile, Options) ->
@@ -391,6 +452,15 @@ async_analyse_to_file(Module, OutFileOrOpts) ->
async_analyse_to_file(Module, OutFile, Options) ->
do_spawn(?MODULE, analyse_to_file, [Module, OutFile, Options]).
+is_options([html]) ->
+ true; % this is not 100% safe - could be a module named html...
+is_options([html|Opts]) ->
+ is_options(Opts);
+is_options([{Opt,_}|_]) when Opt==outfile; Opt==outdir ->
+ true;
+is_options(_) ->
+ false.
+
do_spawn(M,F,A) ->
spawn_link(fun() ->
case apply(M,F,A) of
@@ -408,13 +478,16 @@ async_analyze_to_file(Module, OutFileOrOpts) ->
async_analyze_to_file(Module, OutFile, Options) ->
async_analyse_to_file(Module, OutFile, Options).
-outfilename(Module,Opts) ->
- case lists:member(html,Opts) of
- true ->
- atom_to_list(Module)++".COVER.html";
- false ->
- atom_to_list(Module)++".COVER.out"
- end.
+outfilename(undefined, Module, HTML) ->
+ outfilename(Module, HTML);
+outfilename(OutDir, Module, HTML) ->
+ filename:join(OutDir, outfilename(Module, HTML)).
+
+outfilename(Module, true) ->
+ atom_to_list(Module)++".COVER.html";
+outfilename(Module, false) ->
+ atom_to_list(Module)++".COVER.out".
+
%% export(File)
%% export(File,Module) -> ok | {error,Reason}
@@ -559,7 +632,7 @@ init_main(Starter) ->
,{write_concurrency, true}
]),
ets:new(?COVER_CLAUSE_TABLE, [set, public, named_table]),
- ets:new(?BINARY_TABLE, [set, named_table]),
+ ets:new(?BINARY_TABLE, [set, public, named_table]),
ets:new(?COLLECTION_TABLE, [set, public, named_table]),
ets:new(?COLLECTION_CLAUSE_TABLE, [set, public, named_table]),
net_kernel:monitor_nodes(true),
@@ -573,55 +646,19 @@ main_process_loop(State) ->
reply(From, {ok,StartedNodes}),
main_process_loop(State1);
- {From, {compile, File, Options}} ->
- case do_compile(File, Options) of
- {ok, Module} ->
- remote_load_compiled(State#main_state.nodes,[{Module,File}]),
- reply(From, {ok, Module}),
- Compiled = add_compiled(Module, File,
- State#main_state.compiled),
- Imported = remove_imported(Module,State#main_state.imported),
- NewState = State#main_state{compiled = Compiled,
- imported = Imported},
- %% This module (cover) could have been reloaded. Make
- %% sure we run the new code.
- ?MODULE:main_process_loop(NewState);
- error ->
- reply(From, {error, File}),
- main_process_loop(State)
- end;
+ {From, {compile, Files, Options}} ->
+ {R,S} = do_compile(Files, Options, State),
+ reply(From,R),
+ %% This module (cover) could have been reloaded. Make
+ %% sure we run the new code.
+ ?MODULE:main_process_loop(S);
- {From, {compile_beam, Module, BeamFile0}} ->
- Compiled0 = State#main_state.compiled,
- case get_beam_file(Module,BeamFile0,Compiled0) of
- {ok,BeamFile} ->
- UserOptions = get_compile_options(Module,BeamFile),
- {Reply,Compiled} =
- case do_compile_beam(Module,BeamFile,UserOptions) of
- {ok, Module} ->
- remote_load_compiled(State#main_state.nodes,
- [{Module,BeamFile}]),
- C = add_compiled(Module,BeamFile,Compiled0),
- {{ok,Module},C};
- error ->
- {{error, BeamFile}, Compiled0};
- {error,Reason} -> % no abstract code
- {{error, {Reason, BeamFile}}, Compiled0}
- end,
- reply(From,Reply),
- Imported = remove_imported(Module,State#main_state.imported),
- NewState = State#main_state{compiled = Compiled,
- imported = Imported},
- %% This module (cover) could have been reloaded. Make
- %% sure we run the new code.
- ?MODULE:main_process_loop(NewState);
- {error,no_beam} ->
- %% The module has first been compiled from .erl, and now
- %% someone tries to compile it from .beam
- reply(From,
- {error,{already_cover_compiled,no_beam_found,Module}}),
- main_process_loop(State)
- end;
+ {From, {compile_beams, ModsAndFiles}} ->
+ {R,S} = do_compile_beams(ModsAndFiles,State),
+ reply(From,R),
+ %% This module (cover) could have been reloaded. Make
+ %% sure we run the new code.
+ ?MODULE:main_process_loop(S);
{From, {export,OutFile,Module}} ->
spawn(fun() ->
@@ -706,6 +743,16 @@ main_process_loop(State) ->
unregister(?SERVER),
reply(From, ok);
+ {From, {{analyse, Analysis, Level}, '_'}} ->
+ R = analyse_all(Analysis, Level, State),
+ reply(From, R),
+ main_process_loop(State);
+
+ {From, {{analyse, Analysis, Level}, Modules}} when is_list(Modules) ->
+ R = analyse_list(Modules, Analysis, Level, State),
+ reply(From, R),
+ main_process_loop(State);
+
{From, {{analyse, Analysis, Level}, Module}} ->
S = try
Loaded = is_loaded(Module, State),
@@ -722,15 +769,23 @@ main_process_loop(State) ->
end,
main_process_loop(S);
- {From, {{analyse_to_file, OutFile, Opts},Module}} ->
+ {From, {{analyse_to_file, Opts},'_'}} ->
+ R = analyse_all_to_file(Opts, State),
+ reply(From,R),
+ main_process_loop(State);
+
+ {From, {{analyse_to_file, Opts},Modules}} when is_list(Modules) ->
+ R = analyse_list_to_file(Modules, Opts, State),
+ reply(From,R),
+ main_process_loop(State);
+
+ {From, {{analyse_to_file, Opts},Module}} ->
S = try
Loaded = is_loaded(Module, State),
spawn(fun() ->
- ?SPAWN_DBG(analyse_to_file,
- {Module,OutFile, Opts}),
+ ?SPAWN_DBG(analyse_to_file,{Module,Opts}),
do_parallel_analysis_to_file(
- Module, OutFile, Opts,
- Loaded, From, State)
+ Module, Opts, Loaded, From, State)
end),
State
catch throw:Reason ->
@@ -848,11 +903,15 @@ remote_process_loop(State) ->
{remote,collect,Module,CollectorPid} ->
self() ! {remote,collect,Module,CollectorPid, ?SERVER};
- {remote,collect,Module,CollectorPid,From} ->
+ {remote,collect,Modules0,CollectorPid,From} ->
+ Modules = case Modules0 of
+ '_' -> [M || {M,_} <- State#remote_state.compiled];
+ _ -> Modules0
+ end,
spawn(fun() ->
?SPAWN_DBG(remote_collect,
- {Module, CollectorPid, From}),
- do_collect(Module, CollectorPid, From)
+ {Modules, CollectorPid, From}),
+ do_collect(Modules, CollectorPid, From)
end),
remote_process_loop(State);
@@ -893,39 +952,51 @@ remote_process_loop(State) ->
end.
-do_collect(Module, CollectorPid, From) ->
- AllMods =
- case Module of
- '_' -> ets:tab2list(?COVER_CLAUSE_TABLE);
- _ -> ets:lookup(?COVER_CLAUSE_TABLE, Module)
- end,
-
- %% Sending clause by clause in order to avoid large lists
+do_collect(Modules, CollectorPid, From) ->
pmap(
- fun({_Mod,Clauses}) ->
- lists:map(fun(Clause) ->
- send_collected_data(Clause, CollectorPid)
- end,Clauses)
- end,AllMods),
+ fun(Module) ->
+ Pattern = {#bump{module=Module, _='_'}, '$1'},
+ MatchSpec = [{Pattern,[{'=/=','$1',0}],['$_']}],
+ Match = ets:select(?COVER_TABLE,MatchSpec,?CHUNK_SIZE),
+ send_chunks(Match, CollectorPid, [])
+ end,Modules),
CollectorPid ! done,
remote_reply(From, ok).
-send_collected_data({M,F,A,C,_L}, CollectorPid) ->
- Pattern =
- {#bump{module=M, function=F, arity=A, clause=C}, '_'},
- Bumps = ets:match_object(?COVER_TABLE, Pattern),
- %% Reset
- lists:foreach(fun({Bump,_N}) ->
- ets:insert(?COVER_TABLE, {Bump,0})
- end,
- Bumps),
- CollectorPid ! {chunk,Bumps}.
+send_chunks('$end_of_table', _CollectorPid, Mons) ->
+ get_downs(Mons);
+send_chunks({Chunk,Continuation}, CollectorPid, Mons) ->
+ Mon = spawn_monitor(
+ fun() ->
+ lists:foreach(fun({Bump,_N}) ->
+ ets:insert(?COVER_TABLE, {Bump,0})
+ end,
+ Chunk) end),
+ send_chunk(CollectorPid,Chunk),
+ send_chunks(ets:select(Continuation), CollectorPid, [Mon|Mons]).
+
+send_chunk(CollectorPid,Chunk) ->
+ CollectorPid ! {chunk,Chunk,self()},
+ receive continue -> ok end.
+
+get_downs([]) ->
+ ok;
+get_downs(Mons) ->
+ receive
+ {'DOWN', Ref, _Type, Pid, _Reason} = Down ->
+ case lists:member({Pid,Ref},Mons) of
+ true ->
+ get_downs(lists:delete({Pid,Ref},Mons));
+ false ->
+ %% This should be handled somewhere else
+ self() ! Down,
+ get_downs(Mons)
+ end
+ end.
-reload_originals([{Module,_File}|Compiled]) ->
- do_reload_original(Module),
- reload_originals(Compiled);
-reload_originals([]) ->
- ok.
+reload_originals(Compiled) ->
+ Modules = [M || {M,_} <- Compiled],
+ pmap(fun do_reload_original/1, Modules).
do_reload_original(Module) ->
case code:which(Module) of
@@ -1068,15 +1139,40 @@ remote_load_compiled(_Nodes, [], [], _ModNum) ->
ok;
remote_load_compiled(Nodes, Compiled, Acc, ModNum)
when Compiled == []; ModNum == ?MAX_MODS ->
+ RemoteLoadData = get_downs_r(Acc),
lists:foreach(
fun(Node) ->
- remote_call(Node,{remote,load_compiled,Acc})
+ remote_call(Node,{remote,load_compiled,RemoteLoadData})
end,
Nodes),
remote_load_compiled(Nodes, Compiled, [], 0);
remote_load_compiled(Nodes, [MF | Rest], Acc, ModNum) ->
remote_load_compiled(
- Nodes, Rest, [get_data_for_remote_loading(MF) | Acc], ModNum + 1).
+ Nodes, Rest,
+ [spawn_job_r(fun() -> get_data_for_remote_loading(MF) end) | Acc],
+ ModNum + 1).
+
+spawn_job_r(Fun) ->
+ spawn_monitor(fun() -> exit(Fun()) end).
+
+get_downs_r([]) ->
+ [];
+get_downs_r(Mons) ->
+ receive
+ {'DOWN', Ref, _Type, Pid, R={_,_,_,_}} ->
+ [R|get_downs_r(lists:delete({Pid,Ref},Mons))];
+ {'DOWN', Ref, _Type, Pid, Reason} = Down ->
+ case lists:member({Pid,Ref},Mons) of
+ true ->
+ %% Something went really wrong - don't hang!
+ exit(Reason);
+ false ->
+ %% This should be handled somewhere else
+ self() ! Down,
+ get_downs_r(Mons)
+ end
+ end.
+
%% Read all data needed for loading a cover compiled module on a remote node
%% Binary is the beam code for the module and InitialTable is the initial
@@ -1113,11 +1209,11 @@ remote_reset(Module,Nodes) ->
Nodes).
%% Collect data from remote nodes - used for analyse or stop(Node)
-remote_collect(Module,Nodes,Stop) ->
+remote_collect(Modules,Nodes,Stop) ->
pmap(fun(Node) ->
?SPAWN_DBG(remote_collect,
- {Module, Nodes, Stop}),
- do_collection(Node, Module, Stop)
+ {Modules, Nodes, Stop}),
+ do_collection(Node, Modules, Stop)
end,
Nodes).
@@ -1138,8 +1234,9 @@ do_collection(Node, Module, Stop) ->
collector_proc() ->
?SPAWN_DBG(collector_proc, []),
receive
- {chunk,Chunk} ->
+ {chunk,Chunk,From} ->
insert_in_collection_table(Chunk),
+ From ! continue,
collector_proc();
done ->
ok
@@ -1259,6 +1356,19 @@ add_compiled(Module, File, [H|Compiled]) ->
add_compiled(Module, File, []) ->
[{Module,File}].
+are_loaded([Module|Modules], State, Loaded, Imported, Error) ->
+ try is_loaded(Module,State) of
+ {loaded,File} ->
+ are_loaded(Modules, State, [{Module,File}|Loaded], Imported, Error);
+ {imported,File,_} ->
+ are_loaded(Modules, State, Loaded, [{Module,File}|Imported], Error)
+ catch throw:_ ->
+ are_loaded(Modules, State, Loaded, Imported,
+ [{not_cover_compiled,Module}|Error])
+ end;
+are_loaded([], _State, Loaded, Imported, Error) ->
+ {Loaded, Imported, Error}.
+
is_loaded(Module, State) ->
case get_file(Module, State#main_state.compiled) of
{ok, File} ->
@@ -1333,18 +1443,75 @@ get_compiled_still_loaded(Nodes,Compiled0) ->
%%%--Compilation---------------------------------------------------------
-%% do_compile(File, Options) -> {ok,Module} | {error,Error}
-do_compile(File, UserOptions) ->
+do_compile_beams(ModsAndFiles, State) ->
+ Result0 = pmap(fun({ok,Module,File}) ->
+ do_compile_beam(Module,File,State);
+ (Error) ->
+ Error
+ end,
+ ModsAndFiles),
+ Compiled = [{M,F} || {ok,M,F} <- Result0],
+ remote_load_compiled(State#main_state.nodes,Compiled),
+ fix_state_and_result(Result0,State,[]).
+
+do_compile_beam(Module,BeamFile0,State) ->
+ case get_beam_file(Module,BeamFile0,State#main_state.compiled) of
+ {ok,BeamFile} ->
+ UserOptions = get_compile_options(Module,BeamFile),
+ case do_compile_beam1(Module,BeamFile,UserOptions) of
+ {ok, Module} ->
+ {ok,Module,BeamFile};
+ error ->
+ {error, BeamFile};
+ {error,Reason} -> % no abstract code
+ {error, {Reason, BeamFile}}
+ end;
+ {error,no_beam} ->
+ %% The module has first been compiled from .erl, and now
+ %% someone tries to compile it from .beam
+ {error,{already_cover_compiled,no_beam_found,Module}}
+ end.
+
+fix_state_and_result([{ok,Module,BeamFile}|Rest],State,Acc) ->
+ Compiled = add_compiled(Module,BeamFile,State#main_state.compiled),
+ Imported = remove_imported(Module,State#main_state.imported),
+ NewState = State#main_state{compiled=Compiled,imported=Imported},
+ fix_state_and_result(Rest,NewState,[{ok,Module}|Acc]);
+fix_state_and_result([Error|Rest],State,Acc) ->
+ fix_state_and_result(Rest,State,[Error|Acc]);
+fix_state_and_result([],State,Acc) ->
+ {lists:reverse(Acc),State}.
+
+
+do_compile(Files, Options, State) ->
+ Result0 = pmap(fun(File) ->
+ do_compile(File, Options)
+ end,
+ Files),
+ Compiled = [{M,F} || {ok,M,F} <- Result0],
+ remote_load_compiled(State#main_state.nodes,Compiled),
+ fix_state_and_result(Result0,State,[]).
+
+do_compile(File, Options) ->
+ case do_compile1(File, Options) of
+ {ok, Module} ->
+ {ok,Module,File};
+ error ->
+ {error,File}
+ end.
+
+%% do_compile1(File, Options) -> {ok,Module} | error
+do_compile1(File, UserOptions) ->
Options = [debug_info,binary,report_errors,report_warnings] ++ UserOptions,
case compile:file(File, Options) of
{ok, Module, Binary} ->
- do_compile_beam(Module,Binary,UserOptions);
+ do_compile_beam1(Module,Binary,UserOptions);
error ->
error
end.
%% Beam is a binary or a .beam file name
-do_compile_beam(Module,Beam,UserOptions) ->
+do_compile_beam1(Module,Beam,UserOptions) ->
%% Clear database
do_clear(Module),
@@ -1915,10 +2082,21 @@ common_elems(L1, L2) ->
collect(Nodes) ->
%% local node
AllClauses = ets:tab2list(?COVER_CLAUSE_TABLE),
- pmap(fun move_modules/1,AllClauses),
-
+ Mon1 = spawn_monitor(fun() -> pmap(fun move_modules/1,AllClauses) end),
+
+ %% remote nodes
+ Mon2 = spawn_monitor(fun() -> remote_collect('_',Nodes,false) end),
+ get_downs([Mon1,Mon2]).
+
+%% Collect data for a list of modules
+collect(Modules,Nodes) ->
+ MS = [{{'$1','_'},[{'==','$1',M}],['$_']} || M <- Modules],
+ Clauses = ets:select(?COVER_CLAUSE_TABLE,MS),
+ Mon1 = spawn_monitor(fun() -> pmap(fun move_modules/1,Clauses) end),
+
%% remote nodes
- remote_collect('_',Nodes,false).
+ Mon2 = spawn_monitor(fun() -> remote_collect('_',Nodes,false) end),
+ get_downs([Mon1,Mon2]).
%% Collect data for one module
collect(Module,Clauses,Nodes) ->
@@ -1926,25 +2104,26 @@ collect(Module,Clauses,Nodes) ->
move_modules({Module,Clauses}),
%% remote nodes
- remote_collect(Module,Nodes,false).
+ remote_collect([Module],Nodes,false).
%% When analysing, the data from the local ?COVER_TABLE is moved to the
%% ?COLLECTION_TABLE. Resetting data in ?COVER_TABLE
move_modules({Module,Clauses}) ->
ets:insert(?COLLECTION_CLAUSE_TABLE,{Module,Clauses}),
- move_clauses(Clauses).
+ Pattern = {#bump{module=Module, _='_'}, '_'},
+ MatchSpec = [{Pattern,[],['$_']}],
+ Match = ets:select(?COVER_TABLE,MatchSpec,?CHUNK_SIZE),
+ do_move_module(Match).
-move_clauses([{M,F,A,C,_L}|Clauses]) ->
- Pattern = {#bump{module=M, function=F, arity=A, clause=C}, '_'},
- Bumps = ets:match_object(?COVER_TABLE,Pattern),
+do_move_module({Bumps,Continuation}) ->
lists:foreach(fun({Key,Val}) ->
ets:insert(?COVER_TABLE, {Key,0}),
insert_in_collection_table(Key,Val)
end,
Bumps),
- move_clauses(Clauses);
-move_clauses([]) ->
+ do_move_module(ets:select(Continuation));
+do_move_module('$end_of_table') ->
ok.
%% Given a .beam file, find the .erl file. Look first in same directory as
@@ -2002,6 +2181,26 @@ splice(BeamDir, SrcFile) ->
revsplit(Path) ->
lists:reverse(filename:split(Path)).
+analyse_list(Modules, Analysis, Level, State) ->
+ {LoadedMF, ImportedMF, Error} = are_loaded(Modules, State, [], [], []),
+ Loaded = [M || {M,_} <- LoadedMF],
+ Imported = [M || {M,_} <- ImportedMF],
+ collect(Loaded, State#main_state.nodes),
+ MS = [{{'$1','_'},[{'==','$1',M}],['$_']} || M <- Loaded ++ Imported],
+ AllClauses = ets:select(?COLLECTION_CLAUSE_TABLE,MS),
+ Fun = fun({Module,Clauses}) ->
+ do_analyse(Module, Analysis, Level, Clauses)
+ end,
+ {result, lists:flatten(pmap(Fun, AllClauses)), Error}.
+
+analyse_all(Analysis, Level, State) ->
+ collect(State#main_state.nodes),
+ AllClauses = ets:tab2list(?COLLECTION_CLAUSE_TABLE),
+ Fun = fun({Module,Clauses}) ->
+ do_analyse(Module, Analysis, Level, Clauses)
+ end,
+ {result, lists:flatten(pmap(Fun, AllClauses)), []}.
+
do_parallel_analysis(Module, Analysis, Level, Loaded, From, State) ->
analyse_info(Module,State#main_state.imported),
C = case Loaded of
@@ -2016,7 +2215,7 @@ do_parallel_analysis(Module, Analysis, Level, Loaded, From, State) ->
Clauses
end,
R = do_analyse(Module, Analysis, Level, C),
- reply(From, R).
+ reply(From, {ok,R}).
%% do_analyse(Module, Analysis, Level, Clauses)-> {ok,Answer} | {error,Error}
%% Clauses = [{Module,Function,Arity,Clause,Lines}]
@@ -2035,37 +2234,44 @@ do_analyse(Module, Analysis, line, _Clauses) ->
{{Module,L}, N}
end
end,
- Answer = lists:keysort(1, lists:map(Fun, Bumps)),
- {ok, Answer};
-do_analyse(_Module, Analysis, clause, Clauses) ->
- Fun = case Analysis of
- coverage ->
- fun({M,F,A,C,Ls}) ->
- Pattern = {#bump{module=M,function=F,arity=A,
- clause=C},0},
- Bumps = ets:match_object(?COLLECTION_TABLE, Pattern),
- NotCov = length(Bumps),
- {{M,F,A,C}, {Ls-NotCov, NotCov}}
- end;
- calls ->
- fun({M,F,A,C,_Ls}) ->
- Pattern = {#bump{module=M,function=F,arity=A,
- clause=C},'_'},
- Bumps = ets:match_object(?COLLECTION_TABLE, Pattern),
- {_Bump, Calls} = hd(lists:keysort(1, Bumps)),
- {{M,F,A,C}, Calls}
- end
- end,
- Answer = lists:map(Fun, Clauses),
- {ok, Answer};
+ lists:keysort(1, lists:map(Fun, Bumps));
+do_analyse(Module, Analysis, clause, _Clauses) ->
+ Pattern = {#bump{module=Module},'_'},
+ Bumps = lists:keysort(1,ets:match_object(?COLLECTION_TABLE, Pattern)),
+ analyse_clause(Analysis,Bumps);
do_analyse(Module, Analysis, function, Clauses) ->
- {ok, ClauseResult} = do_analyse(Module, Analysis, clause, Clauses),
- Result = merge_clauses(ClauseResult, merge_fun(Analysis)),
- {ok, Result};
+ ClauseResult = do_analyse(Module, Analysis, clause, Clauses),
+ merge_clauses(ClauseResult, merge_fun(Analysis));
do_analyse(Module, Analysis, module, Clauses) ->
- {ok, FunctionResult} = do_analyse(Module, Analysis, function, Clauses),
+ FunctionResult = do_analyse(Module, Analysis, function, Clauses),
Result = merge_functions(FunctionResult, merge_fun(Analysis)),
- {ok, {Module,Result}}.
+ {Module,Result}.
+
+analyse_clause(_,[]) ->
+ [];
+analyse_clause(coverage,
+ [{#bump{module=M,function=F,arity=A,clause=C},_}|_]=Bumps) ->
+ analyse_clause_cov(Bumps,{M,F,A,C},0,0,[]);
+analyse_clause(calls,Bumps) ->
+ analyse_clause_calls(Bumps,{x,x,x,x},[]).
+
+analyse_clause_cov([{#bump{module=M,function=F,arity=A,clause=C},N}|Bumps],
+ {M,F,A,C}=Clause,Ls,NotCov,Acc) ->
+ analyse_clause_cov(Bumps,Clause,Ls+1,if N==0->NotCov+1; true->NotCov end,Acc);
+analyse_clause_cov([{#bump{module=M1,function=F1,arity=A1,clause=C1},_}|_]=Bumps,
+ Clause,Ls,NotCov,Acc) ->
+ analyse_clause_cov(Bumps,{M1,F1,A1,C1},0,0,[{Clause,{Ls-NotCov,NotCov}}|Acc]);
+analyse_clause_cov([],Clause,Ls,NotCov,Acc) ->
+ lists:reverse(Acc,[{Clause,{Ls-NotCov,NotCov}}]).
+
+analyse_clause_calls([{#bump{module=M,function=F,arity=A,clause=C},_}|Bumps],
+ {M,F,A,C}=Clause,Acc) ->
+ analyse_clause_calls(Bumps,Clause,Acc);
+analyse_clause_calls([{#bump{module=M1,function=F1,arity=A1,clause=C1},N}|Bumps],
+ _Clause,Acc) ->
+ analyse_clause_calls(Bumps,{M1,F1,A1,C1},[{{M1,F1,A1,C1},N}|Acc]);
+analyse_clause_calls([],_Clause,Acc) ->
+ lists:reverse(Acc).
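%% Illustrative example (not part of the patch): with the bumps keysorted on
%% the #bump{} key, a single pass folds the per-line counters into one entry
%% per clause. Module and function names below are made up:
%%
%%     analyse_clause(calls,
%%                    [{#bump{module=m,function=f,arity=1,clause=1,line=3},2},
%%                     {#bump{module=m,function=f,arity=1,clause=1,line=4},0},
%%                     {#bump{module=m,function=f,arity=1,clause=2,line=6},5}])
%%     %% -> [{{m,f,1,1},2},{{m,f,1,2},5}]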
merge_fun(coverage) ->
fun({Cov1,NotCov1}, {Cov2,NotCov2}) ->
@@ -2094,7 +2300,50 @@ merge_functions([{_MFA,R}|Functions], MFun, Result) ->
merge_functions([], _MFun, Result) ->
Result.
-do_parallel_analysis_to_file(Module, OutFile, Opts, Loaded, From, State) ->
+analyse_list_to_file(Modules, Opts, State) ->
+ {LoadedMF, ImportedMF, Error} = are_loaded(Modules, State, [], [], []),
+ collect([M || {M,_} <- LoadedMF], State#main_state.nodes),
+ OutDir = proplists:get_value(outdir,Opts),
+ HTML = lists:member(html,Opts),
+ Fun = fun({Module,File}) ->
+ OutFile = outfilename(OutDir,Module,HTML),
+ do_analyse_to_file(Module,File,OutFile,HTML,State)
+ end,
+ {Ok,Error1} = split_ok_error(pmap(Fun, LoadedMF++ImportedMF),[],[]),
+ {result,Ok,Error ++ Error1}.
+
+analyse_all_to_file(Opts, State) ->
+ collect(State#main_state.nodes),
+ AllModules = get_all_modules(State),
+ OutDir = proplists:get_value(outdir,Opts),
+ HTML = lists:member(html,Opts),
+ Fun = fun({Module,File}) ->
+ OutFile = outfilename(OutDir,Module,HTML),
+ do_analyse_to_file(Module,File,OutFile,HTML,State)
+ end,
+ {Ok,Error} = split_ok_error(pmap(Fun, AllModules),[],[]),
+ {result,Ok,Error}.
+
+get_all_modules(State) ->
+ get_all_modules(State#main_state.compiled ++ State#main_state.imported,[]).
+get_all_modules([{Module,File}|Rest],Acc) ->
+ get_all_modules(Rest,[{Module,File}|Acc]);
+get_all_modules([{Module,File,_}|Rest],Acc) ->
+ case lists:keymember(Module,1,Acc) of
+ true -> get_all_modules(Rest,Acc);
+ false -> get_all_modules(Rest,[{Module,File}|Acc])
+ end;
+get_all_modules([],Acc) ->
+ Acc.
+
+split_ok_error([{ok,R}|Result],Ok,Error) ->
+ split_ok_error(Result,[R|Ok],Error);
+split_ok_error([{error,R}|Result],Ok,Error) ->
+ split_ok_error(Result,Ok,[R|Error]);
+split_ok_error([],Ok,Error) ->
+ {Ok,Error}.
+
+do_parallel_analysis_to_file(Module, Opts, Loaded, From, State) ->
File = case Loaded of
{loaded, File0} ->
[{Module,Clauses}] =
@@ -2105,24 +2354,32 @@ do_parallel_analysis_to_file(Module, OutFile, Opts, Loaded, From, State) ->
{imported, File0, _} ->
File0
end,
+ HTML = lists:member(html,Opts),
+ OutFile =
+ case proplists:get_value(outfile,Opts) of
+ undefined ->
+ outfilename(proplists:get_value(outdir,Opts),Module,HTML);
+ F ->
+ F
+ end,
+ reply(From, do_analyse_to_file(Module,File,OutFile,HTML,State)).
+
+do_analyse_to_file(Module,File,OutFile,HTML,State) ->
case find_source(Module, File) of
{beam,_BeamFile} ->
- reply(From, {error,no_source_code_found});
+ {error,{no_source_code_found,Module}};
ErlFile ->
analyse_info(Module,State#main_state.imported),
- HTML = lists:member(html,Opts),
- R = do_analyse_to_file(Module,OutFile,
- ErlFile,HTML),
- reply(From, R)
+ do_analyse_to_file1(Module,OutFile,ErlFile,HTML)
end.
-%% do_analyse_to_file(Module,OutFile,ErlFile) -> {ok,OutFile} | {error,Error}
+%% do_analyse_to_file1(Module,OutFile,ErlFile) -> {ok,OutFile} | {error,Error}
%% Module = atom()
%% OutFile = ErlFile = string()
-do_analyse_to_file(Module, OutFile, ErlFile, HTML) ->
- case file:open(ErlFile, [read]) of
+do_analyse_to_file1(Module, OutFile, ErlFile, HTML) ->
+ case file:open(ErlFile, [read,raw,read_ahead]) of
{ok, InFd} ->
- case file:open(OutFile, [write]) of
+ case file:open(OutFile, [write,raw,delayed_write]) of
{ok, OutFd} ->
if HTML ->
Encoding = encoding(ErlFile),
@@ -2160,9 +2417,14 @@ do_analyse_to_file(Module, OutFile, ErlFile, HTML) ->
"**************************************"
"\n\n"]),
- print_lines(Module, InFd, OutFd, 1, HTML),
+ Pattern = {#bump{module=Module,line='$1',_='_'},'$2'},
+ MS = [{Pattern,[],[{{'$1','$2'}}]}],
+ CovLines = lists:keysort(1,ets:select(?COLLECTION_TABLE, MS)),
+ print_lines(Module, CovLines, InFd, OutFd, 1, HTML),
- if HTML -> io:format(OutFd,"</pre>\n</body>\n</html>\n",[]);
+ if
+ HTML ->
+ file:write(OutFd, "</pre>\n</body>\n</html>\n");
true -> ok
end,
@@ -2179,21 +2441,19 @@ do_analyse_to_file(Module, OutFile, ErlFile, HTML) ->
{error, {file, ErlFile, Reason}}
end.
-print_lines(Module, InFd, OutFd, L, HTML) ->
- case io:get_line(InFd, '') of
+
+print_lines(Module, CovLines, InFd, OutFd, L, HTML) ->
+ case file:read_line(InFd) of
eof ->
ignore;
- "%"++_=Line -> %Comment line - not executed.
- io:put_chars(OutFd, [tab(),escape_lt_and_gt(Line, HTML)]),
- print_lines(Module, InFd, OutFd, L+1, HTML);
- RawLine ->
+ {ok,"%"++_=Line} -> %Comment line - not executed.
+ file:write(OutFd, [tab(),escape_lt_and_gt(Line, HTML)]),
+ print_lines(Module, CovLines, InFd, OutFd, L+1, HTML);
+ {ok,RawLine} ->
Line = escape_lt_and_gt(RawLine,HTML),
- Pattern = {#bump{module=Module,line=L},'$1'},
- case ets:match(?COLLECTION_TABLE, Pattern) of
- [] ->
- io:put_chars(OutFd, [tab(),Line]);
- Ns ->
- N = lists:foldl(fun([Ni], Nacc) -> Nacc+Ni end, 0, Ns),
+ case CovLines of
+ [{L,N}|CovLines1] ->
+ %% N = lists:foldl(fun([Ni], Nacc) -> Nacc+Ni end, 0, Ns),
if
N=:=0, HTML=:=true ->
LineNoNL = Line -- "\n",
@@ -2201,19 +2461,22 @@ print_lines(Module, InFd, OutFd, L, HTML) ->
%%Str = string:right("0", 6, 32),
RedLine = ["<font color=red>",Str,fill1(),
LineNoNL,"</font>\n"],
- io:put_chars(OutFd, RedLine);
+ file:write(OutFd, RedLine);
N<1000000 ->
Str = string:right(integer_to_list(N), 6, 32),
- io:put_chars(OutFd, [Str,fill1(),Line]);
+ file:write(OutFd, [Str,fill1(),Line]);
N<10000000 ->
Str = integer_to_list(N),
- io:put_chars(OutFd, [Str,fill2(),Line]);
+ file:write(OutFd, [Str,fill2(),Line]);
true ->
Str = integer_to_list(N),
- io:put_chars(OutFd, [Str,fill3(),Line])
- end
- end,
- print_lines(Module, InFd, OutFd, L+1, HTML)
+ file:write(OutFd, [Str,fill3(),Line])
+ end,
+ print_lines(Module, CovLines1, InFd, OutFd, L+1, HTML);
+ _ ->
+ file:write(OutFd, [tab(),Line]),
+ print_lines(Module, CovLines, InFd, OutFd, L+1, HTML)
+ end
end.
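%% Illustrative sketch (not part of the patch): CovLines is consumed in step
%% with the source file, so each printed line costs at most one head match
%% instead of an ets:match/2 call. The walk reduces to:
print_walk_sketch([{L,N}|Rest], L) -> {covered, N, Rest};
print_walk_sketch(CovLines, _L)   -> {not_covered, CovLines}.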
tab() -> " | ".
@@ -2223,7 +2486,7 @@ fill3() -> "| ".
%%%--Export--------------------------------------------------------------
do_export(Module, OutFile, From, State) ->
- case file:open(OutFile,[write,binary,raw]) of
+ case file:open(OutFile,[write,binary,raw,delayed_write]) of
{ok,Fd} ->
Reply =
case Module of
@@ -2362,21 +2625,21 @@ do_reset_collection_table(Module) ->
ets:match_delete(?COLLECTION_TABLE, {#bump{module=Module},'_'}).
%% do_reset(Module) -> ok
-%% The reset is done on a per-clause basis to avoid building
+%% The reset is done in chunks of ?CHUNK_SIZE bumps to avoid building
%% long lists in the case of very large modules
do_reset(Module) ->
- [{Module,Clauses}] = ets:lookup(?COVER_CLAUSE_TABLE, Module),
- do_reset2(Clauses).
+ Pattern = {#bump{module=Module, _='_'}, '$1'},
+ MatchSpec = [{Pattern,[{'=/=','$1',0}],['$_']}],
+ Match = ets:select(?COVER_TABLE,MatchSpec,?CHUNK_SIZE),
+ do_reset2(Match).
-do_reset2([{M,F,A,C,_L}|Clauses]) ->
- Pattern = {#bump{module=M, function=F, arity=A, clause=C}, '_'},
- Bumps = ets:match_object(?COVER_TABLE, Pattern),
+do_reset2({Bumps,Continuation}) ->
lists:foreach(fun({Bump,_N}) ->
ets:insert(?COVER_TABLE, {Bump,0})
end,
Bumps),
- do_reset2(Clauses);
-do_reset2([]) ->
+ do_reset2(ets:select(Continuation));
+do_reset2('$end_of_table') ->
ok.
do_clear(Module) ->
@@ -2419,31 +2682,43 @@ escape_lt_and_gt1([],Acc) ->
escape_lt_and_gt1([H|T],Acc) ->
escape_lt_and_gt1(T,[H|Acc]).
-pmap(Fun, List) ->
- pmap(Fun, List, 20).
-pmap(Fun, List, Limit) ->
- pmap(Fun, List, [], Limit, 0, []).
-pmap(Fun, [E | Rest], Pids, Limit, Cnt, Acc) when Cnt < Limit ->
- Collector = self(),
- Pid = spawn_link(fun() ->
- ?SPAWN_DBG(pmap,E),
- Collector ! {res,self(),Fun(E)}
- end),
- erlang:monitor(process, Pid),
- pmap(Fun, Rest, Pids ++ [Pid], Limit, Cnt + 1, Acc);
-pmap(Fun, List, [Pid | Pids], Limit, Cnt, Acc) ->
- receive
- {'DOWN', _Ref, process, X, _} when is_pid(X) ->
- pmap(Fun, List, [Pid | Pids], Limit, Cnt - 1, Acc);
- {res, Pid, Res} ->
- pmap(Fun, List, Pids, Limit, Cnt, [Res | Acc])
- end;
-pmap(_Fun, [], [], _Limit, 0, Acc) ->
- lists:reverse(Acc);
-pmap(Fun, [], [], Limit, Cnt, Acc) ->
+%%%--Internal functions for parallelization------------------------------
+pmap(Fun,List) ->
+ NTot = length(List),
+ NProcs = erlang:system_info(schedulers) * 2,
+ NPerProc = (NTot div NProcs) + 1,
+ Mons = pmap_spawn(Fun,NPerProc,List,[]),
+ pmap_collect(Mons,[]).
+
+pmap_spawn(_,_,[],Mons) ->
+ Mons;
+pmap_spawn(Fun,NPerProc,List,Mons) ->
+ {L1,L2} = if length(List)>=NPerProc -> lists:split(NPerProc,List);
+ true -> {List,[]} % last chunk
+ end,
+ Mon =
+ spawn_monitor(
+ fun() ->
+ exit({pmap_done,lists:map(Fun,L1)})
+ end),
+ pmap_spawn(Fun,NPerProc,L2,[Mon|Mons]).
+
+pmap_collect([],Acc) ->
+ lists:append(Acc);
+pmap_collect(Mons,Acc) ->
receive
- {'DOWN', _Ref, process, X, _} when is_pid(X) ->
- pmap(Fun, [], [], Limit, Cnt - 1, Acc)
+ {'DOWN', Ref, process, Pid, {pmap_done,Result}} ->
+ pmap_collect(lists:delete({Pid,Ref},Mons),[Result|Acc]);
+ {'DOWN', Ref, process, Pid, Reason} = Down ->
+ case lists:member({Pid,Ref},Mons) of
+ true ->
+ %% Something went really wrong - don't hang!
+ exit(Reason);
+ false ->
+ %% This should be handled somewhere else
+ self() ! Down,
+ pmap_collect(Mons,Acc)
+ end
end.
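%% Illustrative usage (not part of the patch): pmap/2 maps Fun over List in
%% about 2 * schedulers worker processes; each worker hands back its chunk
%% via exit({pmap_done,Results}) and chunks are appended in 'DOWN' arrival
%% order, so the overall ordering is not guaranteed, e.g.
%%
%%     lists:sort(pmap(fun(X) -> X*X end, lists:seq(1,5)))
%%     %% -> [1,4,9,16,25]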
%%%-----------------------------------------------------------------
diff --git a/lib/tools/src/lcnt.erl b/lib/tools/src/lcnt.erl
index f1251fddab..d5ba8aa52f 100644
--- a/lib/tools/src/lcnt.erl
+++ b/lib/tools/src/lcnt.erl
@@ -305,7 +305,7 @@ handle_call({inspect, Lockname, InOpts}, _From, #state{ duration=Duration, locks
{true, true} -> locks_ids(Filtered);
_ -> []
end,
- Combos = combine_classes(Filtered, proplists:get_value(combine, Opts)),
+ Combos = combine_classes(Filtered, proplists:get_value(combine, Opts)),
case proplists:get_value(locations, Opts) of
true ->
lists:foreach(fun
@@ -329,9 +329,8 @@ handle_call({inspect, Lockname, InOpts}, _From, #state{ duration=Duration, locks
end
end, Combos);
_ ->
- Print1 = locks2print(Combos, Duration),
- Print2 = filter_print(Print1, Opts),
- print_lock_information(Print2, proplists:get_value(print, Opts))
+ Print = filter_print(locks2print(Combos, Duration), Opts),
+ print_lock_information(Print, proplists:get_value(print, Opts))
end,
{reply, ok, State};
@@ -357,8 +356,7 @@ handle_call({histogram, Lockname, InOpts}, _From, #state{ duration=Duration, loc
{thresholds, [{tries, -1}, {colls, -1}, {time, -1}]}], Opts),
Prints = locks2print([L], Duration),
print_lock_information(Prints, proplists:get_value(print, Opts1)),
- print_full_histogram(SumStats#stats.hist),
- io:format("~n")
+ print_full_histogram(SumStats#stats.hist)
end, Combos),
{reply, ok, State};
@@ -509,20 +507,23 @@ filter_locks(Locks, Lockname) ->
% 4. max length of locks
filter_print(PLs, Opts) ->
- TLs = threshold_locks(PLs, proplists:get_value(thresholds, Opts, [])),
- SLs = sort_locks(TLs, proplists:get_value(sort, Opts, time)),
- CLs = cut_locks(SLs, proplists:get_value(max_locks, Opts, none)),
- reverse_locks(CLs, not proplists:get_value(reverse,Opts, false)).
-
-sort_locks(Locks, name) -> lists:keysort(#print.name, Locks);
-sort_locks(Locks, id) -> lists:keysort(#print.id, Locks);
-sort_locks(Locks, type) -> lists:keysort(#print.type, Locks);
-sort_locks(Locks, tries) -> lists:keysort(#print.tries, Locks);
-sort_locks(Locks, colls) -> lists:keysort(#print.colls, Locks);
-sort_locks(Locks, ratio) -> lists:keysort(#print.cr, Locks);
-sort_locks(Locks, time) -> lists:keysort(#print.time, Locks);
+ TLs = threshold_locks(PLs, proplists:get_value(thresholds, Opts, [])),
+ SLs = sort_locks(TLs, proplists:get_value(sort, Opts, time)),
+ CLs = cut_locks(SLs, proplists:get_value(max_locks, Opts, none)),
+ reverse_locks(CLs, proplists:get_value(reverse, Opts, false)).
+
+sort_locks(Locks, name) -> reverse_sort_locks(#print.name, Locks);
+sort_locks(Locks, id) -> reverse_sort_locks(#print.id, Locks);
+sort_locks(Locks, type) -> reverse_sort_locks(#print.type, Locks);
+sort_locks(Locks, tries) -> reverse_sort_locks(#print.tries, Locks);
+sort_locks(Locks, colls) -> reverse_sort_locks(#print.colls, Locks);
+sort_locks(Locks, ratio) -> reverse_sort_locks(#print.cr, Locks);
+sort_locks(Locks, time) -> reverse_sort_locks(#print.time, Locks);
sort_locks(Locks, _) -> sort_locks(Locks, time).
+reverse_sort_locks(Ix, Locks) ->
+ lists:reverse(lists:keysort(Ix, Locks)).
+
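% Illustrative note (not part of the patch): sorting is now descending up
% front, e.g. reverse_sort_locks(#print.time, Locks) puts the largest time
% first, so reverse_locks/2 only flips the order when the caller passes the
% 'reverse' option; the default output (largest first) is unchanged.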
% cut locks not above certain thresholds
threshold_locks(Locks, Thresholds) ->
Tries = proplists:get_value(tries, Thresholds, -1),
@@ -647,15 +648,19 @@ format_histogram(Tup) when is_tuple(Tup) ->
_ -> string_histogram([case V of 0 -> 0; _ -> V/Max end || V <- Vs])
end.
-string_histogram([0|Vs]) ->
- [$\s|string_histogram(Vs)];
-string_histogram([V|Vs]) when V > 0.66 ->
- [$X|string_histogram(Vs)];
-string_histogram([V|Vs]) when V > 0.33 ->
- [$x|string_histogram(Vs)];
-string_histogram([_|Vs]) ->
- [$.|string_histogram(Vs)];
-string_histogram([]) -> [].
+string_histogram(Vs) ->
+ [$||histogram_values_to_string(Vs,$|)].
+
+histogram_values_to_string([0|Vs],End) ->
+ [$\s|histogram_values_to_string(Vs,End)];
+histogram_values_to_string([V|Vs],End) when V > 0.66 ->
+ [$X|histogram_values_to_string(Vs,End)];
+histogram_values_to_string([V|Vs],End) when V > 0.33 ->
+ [$x|histogram_values_to_string(Vs,End)];
+histogram_values_to_string([_|Vs],End) ->
+ [$.|histogram_values_to_string(Vs,End)];
+histogram_values_to_string([],End) ->
+ [End].
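%% Illustrative example (not part of the patch): each bucket is rendered
%% relative to the largest bucket and the bar is now delimited by '|', e.g.
%% for already-normalized values
%%
%%     string_histogram([0, 0.2, 0.5, 1.0])
%%     %% -> "| .xX|"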
%% state making
@@ -778,7 +783,7 @@ auto_print_width(Locks, Print) ->
({print,print}, Out) -> [print|Out];
({Str, Len}, Out) -> [erlang:min(erlang:max(length(s(Str))+1,Len),80)|Out]
end, [], lists:zip(tuple_to_list(L), tuple_to_list(Max)))))
- end, #print{ id = 4, type = 5, entry = 5, name = 6, tries = 8, colls = 13, cr = 16, time = 11, dtr = 14, hist=20 },
+ end, #print{ id=4, type=5, entry=5, name=6, tries=8, colls=13, cr=16, time=11, dtr=14, hist=20 },
Locks),
% Setup the offsets for later pruning
Offsets = [
@@ -820,7 +825,7 @@ print_header(Opts) ->
cr = "collisions [%]",
time = "time [us]",
dtr = "duration [%]",
- hist = "histogram"
+ hist = "histogram [log2(us)]"
},
Divider = #print{
name = lists:duplicate(1 + length(Header#print.name), 45),
@@ -863,9 +868,9 @@ format_lock(L, [Opt|Opts]) ->
{time, W} -> [{space, W, s(L#print.time) } | format_lock(L, Opts)];
duration -> [{space, 20, s(L#print.dtr) } | format_lock(L, Opts)];
{duration, W} -> [{space, W, s(L#print.dtr) } | format_lock(L, Opts)];
- histogram -> [{space, 0, s(L#print.hist) } | format_lock(L, Opts)];
- {histogram, W} -> [{space, W, s(L#print.hist) } | format_lock(L, Opts)];
- _ -> format_lock(L, Opts)
+ histogram -> [{space, 20, s(L#print.hist) } | format_lock(L, Opts)];
+ {histogram, W} -> [{left, W - length(s(L#print.hist)) - 1, s(L#print.hist)} | format_lock(L, Opts)];
+ _ -> format_lock(L, Opts)
end.
print_state_information(#state{locks = Locks} = State) ->
@@ -926,6 +931,7 @@ s(T) -> term2string(T).
strings(Strings) -> strings(Strings, []).
strings([], Out) -> Out;
strings([{space, N, S} | Ss], Out) -> strings(Ss, Out ++ term2string(term2string("~~~ws", [N]), [S]));
+strings([{left, N, S} | Ss], Out) -> strings(Ss, Out ++ term2string(term2string(" ~~s~~~ws", [N]), [S,""]));
strings([{format, Format, S} | Ss], Out) -> strings(Ss, Out ++ term2string(Format, [S]));
strings([S|Ss], Out) -> strings(Ss, Out ++ term2string("~ts", [S])).
diff --git a/lib/tools/test/cover_SUITE.erl b/lib/tools/test/cover_SUITE.erl
index 80807b1d38..368fa6c3d1 100644
--- a/lib/tools/test/cover_SUITE.erl
+++ b/lib/tools/test/cover_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2001-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2001-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -33,6 +33,8 @@
-export([do_coverage/1]).
+-export([distribution_performance/1]).
+
-include_lib("test_server/include/test_server.hrl").
%%----------------------------------------------------------------------
@@ -170,10 +172,15 @@ compile(Config) when is_list(Config) ->
?line {ok, CWD} = file:get_cwd(),
?line Result2 = cover:compile_directory(CWD),
?line SortedResult = lists:sort(Result2),
- ?line [{error,_DFile},{ok,a},{ok,b},{ok,cc},{ok,f}] = SortedResult,
+ ?line [{error,DFile},{ok,a},{ok,b},{ok,cc},{ok,f}] = SortedResult,
?line [{ok,e}] = cover:compile_directory("d1"),
?line {error,enoent} = cover:compile_directory("d2"),
+ [] = cover:compile([]),
+ Result21 = cover:compile([a,b,"cc.erl",d,"f"]),
+ SortedResult21 = lists:sort(Result21),
+ [{error,DFile},{ok,a},{ok,b},{ok,cc},{ok,f}] = SortedResult21,
+
?line {ok,a} = cover:compile(a),
?line {ok,b} = compile:file(b),
?line code:purge(b),
@@ -213,8 +220,14 @@ compile(Config) when is_list(Config) ->
?line {error,non_existing} = cover:compile_beam(z),
?line [{ok,y}] = cover:compile_beam_directory("d"),
?line Result3 = lists:sort(cover:compile_beam_directory()),
- ?line [{error,{no_abstract_code,_XBeam}},{ok,crypt},{ok,v},{ok,w}] = Result3,
+ ?line [{error,{no_abstract_code,XBeam}},{ok,crypt},{ok,v},{ok,w}] = Result3,
?line {error,enoent} = cover:compile_beam_directory("d2"),
+
+ [] = cover:compile_beam([]),
+ Result31 = cover:compile_beam([crypt,"v.beam",w,"x"]),
+ SortedResult31 = lists:sort(Result31),
+ [{error,{no_abstract_code,XBeam}},{ok,crypt},{ok,v},{ok,w}] = SortedResult31,
+
?line decompile([v,w,y]),
?line Files = lsfiles(),
?line remove(files(Files, ".beam")).
@@ -239,20 +252,22 @@ analyse(Config) when is_list(Config) ->
?line done = a:start(5),
- ?line {ok, {a,{17,2}}} = cover:analyse(a, coverage, module),
- ?line {ok, [{{a,start,1},{6,0}},
- {{a,stop,1},{0,1}},
- {{a,pong,1},{1,0}},
- {{a,loop,3},{5,1}},
- {{a,trycatch,1},{4,0}},
- {{a,exit_kalle,0},{1,0}}]} = cover:analyse(a, coverage, function),
- ?line {ok, [{{a,start,1,1},{6,0}},
- {{a,stop,1,1},{0,1}},
- {{a,pong,1,1},{1,0}},
+ {ok, {a,{17,2}}=ACovMod} = cover:analyse(a, coverage, module),
+ {ok, [{{a,exit_kalle,0},{1,0}},
+ {{a,loop,3},{5,1}},
+ {{a,pong,1},{1,0}},
+ {{a,start,1},{6,0}},
+ {{a,stop,1},{0,1}},
+ {{a,trycatch,1},{4,0}}]=ACovFunc} =
+ cover:analyse(a, coverage, function),
+ {ok, [{{a,exit_kalle,0,1},{1,0}},
{{a,loop,3,1},{3,1}},
{{a,loop,3,2},{2,0}},
- {{a,trycatch,1,1},{4,0}},
- {{a,exit_kalle,0,1},{1,0}}]} = cover:analyse(a, coverage, clause),
+ {{a,pong,1,1},{1,0}},
+ {{a,start,1,1},{6,0}},
+ {{a,stop,1,1},{0,1}},
+ {{a,trycatch,1,1},{4,0}}]=ACovClause} =
+ cover:analyse(a, coverage, clause),
?line {ok, [{{a,9},{1,0}},
{{a,10},{1,0}},
{{a,11},{1,0}},
@@ -271,22 +286,22 @@ analyse(Config) when is_list(Config) ->
{{a,47},{1,0}},
{{a,49},{1,0}},
{{a,51},{1,0}},
- {{a,55},{1,0}}]} = cover:analyse(a, coverage, line),
-
- ?line {ok, {a,15}} = cover:analyse(a, calls, module),
- ?line {ok, [{{a,start,1},1},
- {{a,stop,1},0},
- {{a,pong,1},5},
- {{a,loop,3},6},
- {{a,trycatch,1},2},
- {{a,exit_kalle,0},1}]} = cover:analyse(a, calls, function),
- ?line {ok, [{{a,start,1,1},1},
- {{a,stop,1,1},0},
- {{a,pong,1,1},5},
- {{a,loop,3,1},5},
- {{a,loop,3,2},1},
- {{a,trycatch,1,1},2},
- {{a,exit_kalle,0,1},1}]} = cover:analyse(a, calls, clause),
+ {{a,55},{1,0}}]=ACovLine} = cover:analyse(a, coverage, line),
+
+ {ok, {a,15}=ACallsMod} = cover:analyse(a, calls, module),
+ {ok, [{{a,exit_kalle,0},1},
+ {{a,loop,3},6},
+ {{a,pong,1},5},
+ {{a,start,1},1},
+ {{a,stop,1},0},
+ {{a,trycatch,1},2}]=ACallsFunc} = cover:analyse(a, calls, function),
+ {ok, [{{a,exit_kalle,0,1},1},
+ {{a,loop,3,1},5},
+ {{a,loop,3,2},1},
+ {{a,pong,1,1},5},
+ {{a,start,1,1},1},
+ {{a,stop,1,1},0},
+ {{a,trycatch,1,1},2}]=ACallsClause} = cover:analyse(a, calls, clause),
?line {ok, [{{a,9},1},
{{a,10},1},
{{a,11},1},
@@ -305,27 +320,85 @@ analyse(Config) when is_list(Config) ->
{{a,47},1},
{{a,49},1},
{{a,51},2},
- {{a,55},1}]} = cover:analyse(a, calls, line),
-
- ?line {ok, [{{a,start,1},{6,0}},
- {{a,stop,1},{0,1}},
- {{a,pong,1},{1,0}},
- {{a,loop,3},{5,1}},
- {{a,trycatch,1},{4,0}},
- {{a,exit_kalle,0},{1,0}}]} = cover:analyse(a),
- ?line {ok, {a,{17,2}}} = cover:analyse(a, module),
- ?line {ok, [{{a,start,1},1},
- {{a,stop,1},0},
- {{a,pong,1},5},
- {{a,loop,3},6},
- {{a,trycatch,1},2},
- {{a,exit_kalle,0},1}]} = cover:analyse(a, calls),
+ {{a,55},1}]=ACallsLine} = cover:analyse(a, calls, line),
+
+ {ok,ACovFunc} = cover:analyse(a),
+ {ok,ACovMod} = cover:analyse(a, module),
+ {ok,ACallsFunc} = cover:analyse(a, calls),
?line {ok, "a.COVER.out"} = cover:analyse_to_file(a),
?line {ok, "e.COVER.out"} = cover:analyse_to_file(e),
?line {ok, "a.COVER.html"} = cover:analyse_to_file(a,[html]),
?line {ok, "e.COVER.html"} = cover:analyse_to_file(e,[html]),
+ %% Analyse all modules
+ Modules = cover:modules(),
+ N = length(Modules),
+
+ {result,CovFunc,[]} = cover:analyse(), % default = coverage, function
+ ACovFunc = [A || {{a,_,_},_}=A<-CovFunc],
+
+ {result,CovMod,[]} = cover:analyse(coverage,module),
+ ACovMod = lists:keyfind(a,1,CovMod),
+
+ {result,CovClause,[]} = cover:analyse(coverage,clause),
+ ACovClause = [A || {{a,_,_,_},_}=A<-CovClause],
+
+ {result,CovLine,[]} = cover:analyse(coverage,line),
+ ACovLine = [A || {{a,_},_}=A<-CovLine],
+
+ {result,CallsFunc,[]} = cover:analyse(calls,function),
+ ACallsFunc = [A || {{a,_,_},_}=A<-CallsFunc],
+
+ {result,CallsMod,[]} = cover:analyse(calls,module),
+ ACallsMod = lists:keyfind(a,1,CallsMod),
+
+ {result,CallsClause,[]} = cover:analyse(calls,clause),
+ ACallsClause = [A || {{a,_,_,_},_}=A<-CallsClause],
+
+ {result,CallsLine,[]} = cover:analyse(calls,line),
+ ACallsLine = [A || {{a,_},_}=A<-CallsLine],
+
+ {result,AllToFile,[]} = cover:analyse_to_file(),
+ N = length(AllToFile),
+ true = lists:member("a.COVER.out",AllToFile),
+ {result,AllToFileHtml,[]} = cover:analyse_to_file([html]),
+ N = length(AllToFileHtml),
+ true = lists:member("a.COVER.html",AllToFileHtml),
+
+ %% Analyse list of modules
+ %% We list all modules explicitly so the result can be compared with
+ %% the analyse-all result above.
+
+ {result,CovFunc1,[]} = cover:analyse(Modules), % default = coverage, function
+ true = lists:sort(CovFunc) == lists:sort(CovFunc1),
+
+ {result,CovMod1,[]} = cover:analyse(Modules,coverage,module),
+ true = lists:sort(CovMod) == lists:sort(CovMod1),
+
+ {result,CovClause1,[]} = cover:analyse(Modules,coverage,clause),
+ true = lists:sort(CovClause) == lists:sort(CovClause1),
+
+ {result,CovLine1,[]} = cover:analyse(Modules,coverage,line),
+ true = lists:sort(CovLine) == lists:sort(CovLine1),
+
+ {result,CallsFunc1,[]} = cover:analyse(Modules,calls,function),
+ true = lists:sort(CallsFunc) == lists:sort(CallsFunc1),
+
+ {result,CallsMod1,[]} = cover:analyse(Modules,calls,module),
+ true = lists:sort(CallsMod) == lists:sort(CallsMod1),
+
+ {result,CallsClause1,[]} = cover:analyse(Modules,calls,clause),
+ true = lists:sort(CallsClause) == lists:sort(CallsClause1),
+
+ {result,CallsLine1,[]} = cover:analyse(Modules,calls,line),
+ true = lists:sort(CallsLine) == lists:sort(CallsLine1),
+
+ {result,AllToFile1,[]} = cover:analyse_to_file(Modules),
+ true = lists:sort(AllToFile) == lists:sort(AllToFile1),
+ {result,AllToFileHtml1,[]} = cover:analyse_to_file(Modules,[html]),
+ true = lists:sort(AllToFileHtml) == lists:sort(AllToFileHtml1),
+
%% analyse_to_file of file which is compiled from beam
?line {ok,f} = compile:file(f,[debug_info]),
?line code:purge(f),
@@ -348,14 +421,17 @@ analyse(Config) when is_list(Config) ->
{module,z} = code:load_file(z),
{ok,z} = cover:compile_beam(z),
ok = file:delete("z.erl"),
- {error,no_source_code_found} = cover:analyse_to_file(z),
+ {error,{no_source_code_found,z}} = cover:analyse_to_file(z),
+ {result,[],[{no_source_code_found,z}]} = cover:analyse_to_file([z]),
code:purge(z),
code:delete(z),
?line {error,{not_cover_compiled,b}} = cover:analyse(b),
?line {error,{not_cover_compiled,g}} = cover:analyse(g),
+ {result,[],[{not_cover_compiled,b}]} = cover:analyse([b]),
?line {error,{not_cover_compiled,b}} = cover:analyse_to_file(b),
- ?line {error,{not_cover_compiled,g}} = cover:analyse_to_file(g).
+ {error,{not_cover_compiled,g}} = cover:analyse_to_file(g),
+ {result,[],[{not_cover_compiled,g}]} = cover:analyse_to_file([g]).
misc(suite) -> [];
misc(Config) when is_list(Config) ->
@@ -680,6 +756,119 @@ stop_node_after_disconnect(Config) ->
?t:stop_node(N1),
ok.
+distribution_performance(Config) ->
+ PrivDir = ?config(priv_dir,Config),
+ Dir = filename:join(PrivDir,"distribution_performance"),
+ AllFiles = filename:join(Dir,"*"),
+ ok = filelib:ensure_dir(AllFiles),
+ code:add_patha(Dir),
+ M = 9, % Generate M modules
+ F = 210, % with F functions
+ C = 10, % and each function of C clauses
+ Mods = generate_modules(M,F,C,Dir),
+
+% ?t:break(""),
+
+ NodeName = cover_SUITE_distribution_performance,
+ {ok,N1} = ?t:start_node(NodeName,peer,[{start_cover,false}]),
+ %% CFun = fun() ->
+ %% [{ok,_} = cover:compile_beam(Mod) || Mod <- Mods]
+ %% end,
+ CFun = fun() -> cover:compile_beam(Mods) end,
+ {CT,CA} = timer:tc(CFun),
+% erlang:display(CA),
+ erlang:display({compile,CT}),
+
+ {SNT,_} = timer:tc(fun() -> {ok,[N1]} = cover:start(nodes()) end),
+ erlang:display({start_node,SNT}),
+
+ [1 = rpc:call(N1,Mod,f1,[1]) || Mod <- Mods],
+
+% Fun = fun() -> [cover:analyse(Mod,calls,function) || Mod<-Mods] end,
+% Fun = fun() -> analyse_all(Mods,calls,function) end,
+% Fun = fun() -> cover:analyse('_',calls,function) end,
+ Fun = fun() -> cover:analyse(Mods,calls,function) end,
+
+% Fun = fun() -> [begin cover:analyse_to_file(Mod,[html]) end || Mod<-Mods] end,
+% Fun = fun() -> analyse_all_to_file(Mods,[html]) end,
+% Fun = fun() -> cover:analyse_to_file(Mods,[html]) end,
+% Fun = fun() -> cover:analyse_to_file([html]) end,
+
+% Fun = fun() -> cover:reset() end,
+
+ {AT,A} = timer:tc(Fun),
+ erlang:display({analyse,AT}),
+% erlang:display(lists:sort([X || X={_MFA,N} <- lists:append([L || {ok,L}<-A]), N=/=0])),
+
+ %% fprof:apply(Fun, [],[{procs,[whereis(cover_server)]}]),
+ %% fprof:profile(),
+ %% fprof:analyse(dest,[]),
+
+ {SNT2,_} = timer:tc(fun() -> ?t:stop_node(N1) end),
+ erlang:display({stop_node,SNT2}),
+
+ code:del_path(Dir),
+ Files = filelib:wildcard(AllFiles),
+ [ok = file:delete(File) || File <- Files],
+ ok = file:del_dir(Dir),
+ ok.
+
+%% Run analysis in parallel
+analyse_all(Mods,Analysis,Level) ->
+ Pids = [begin
+ Pid = spawn(fun() ->
+ {ok,A} = cover:analyse(Mod,Analysis,Level),
+ exit(A)
+ end),
+ erlang:monitor(process,Pid),
+ Pid
+ end || Mod <- Mods],
+ get_downs(Pids,[]).
+
+analyse_all_to_file(Mods,Opts) ->
+ Pids = [begin
+ Pid = cover:async_analyse_to_file(Mod,Opts),
+ erlang:monitor(process,Pid),
+ Pid
+ end || Mod <- Mods],
+ get_downs(Pids,[]).
+
+get_downs([],Acc) ->
+ Acc;
+get_downs(Pids,Acc) ->
+ receive
+ {'DOWN', _Ref, _Type, Pid, A} ->
+ get_downs(lists:delete(Pid,Pids),[A|Acc])
+ end.
+
+generate_modules(0,_,_,_) ->
+ [];
+generate_modules(M,F,C,Dir) ->
+ ModStr = "m" ++ integer_to_list(M),
+ Mod = list_to_atom(ModStr),
+ Src = ["-module(",ModStr,").\n"
+ "-compile(export_all).\n" |
+ generate_functions(F,C)],
+ Erl = filename:join(Dir,ModStr++".erl"),
+ ok = file:write_file(Erl,Src),
+ {ok,Mod} = compile:file(Erl,[{outdir,Dir},debug_info,report]),
+ [Mod | generate_modules(M-1,F,C,Dir)].
+
+generate_functions(0,_) ->
+ [];
+generate_functions(F,C) ->
+ Func = "f" ++ integer_to_list(F),
+ [generate_clauses(C,Func) | generate_functions(F-1,C)].
+
+generate_clauses(0,_) ->
+ [];
+generate_clauses(C,Func) ->
+ CStr = integer_to_list(C),
+ Sep = if C==1 -> "."; true -> ";" end,
+ [Func,"(",CStr,") -> ",CStr,Sep,"\n" |
+ generate_clauses(C-1,Func)].
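%% Illustrative example (not part of the patch): for M=1, F=1, C=2 the
%% generator above produces source equivalent to
%%
%%     -module(m1).
%%     -compile(export_all).
%%     f1(2) -> 2;
%%     f1(1) -> 1.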
+
+
export_import(suite) -> [];
export_import(Config) when is_list(Config) ->
?line DataDir = ?config(data_dir, Config),
@@ -788,10 +977,11 @@ otp_5031(Config) when is_list(Config) ->
Dog = ?t:timetrap(?t:seconds(10)),
- ?line {ok,N1} = ?t:start_node(cover_SUITE_distribution1,slave,[]),
+ {ok,N1} = ?t:start_node(cover_SUITE_otp_5031,slave,[]),
?line {ok,[N1]} = cover:start(N1),
?line {error,not_main_node} = rpc:call(N1,cover,modules,[]),
?line cover:stop(),
+ ?t:stop_node(N1),
?t:timetrap_cancel(Dog),
ok.
@@ -1005,6 +1195,7 @@ otp_7095(Config) when is_list(Config) ->
ok.
+
otp_8270(doc) ->
["OTP-8270. Bug."];
otp_8270(suite) -> [];
@@ -1020,7 +1211,7 @@ otp_8270(Config) when is_list(Config) ->
?line {ok,N3} = ?t:start_node(cover_n3,slave,As),
timer:sleep(500),
- cover:start(nodes()),
+ {ok,[_,_,_]} = cover:start(nodes()),
Test = <<
"-module(m).\n"
@@ -1058,6 +1249,7 @@ otp_8270(Config) when is_list(Config) ->
?line {N2,true} = {N2,is_list(N2_info)},
?line {N3,true} = {N3,is_list(N3_info)},
+ exit(Pid1,kill),
?line ?t:stop_node(N1),
?line ?t:stop_node(N2),
?line ?t:stop_node(N3),
@@ -1572,7 +1764,9 @@ is_unloaded(What) ->
end.
check_f_calls(F1,F2) ->
- {ok,[{{f,f1,0},F1},{{f,f2,0},F2}|_]} = cover:analyse(f,calls,function).
+ {ok,A} = cover:analyse(f,calls,function),
+ {_,F1} = lists:keyfind({f,f1,0},1,A),
+ {_,F2} = lists:keyfind({f,f2,0},1,A).
cover_which_nodes(Expected) ->
case cover:which_nodes() of
diff --git a/lib/wx/src/wxe_server.erl b/lib/wx/src/wxe_server.erl
index 465b9da2e0..153e2475ba 100644
--- a/lib/wx/src/wxe_server.erl
+++ b/lib/wx/src/wxe_server.erl
@@ -223,14 +223,18 @@ handle_connect(Object, #evh{handler=undefined, cb=Callback} = EvData0,
Error ->
{reply, Error, State0}
end;
-handle_connect(Object, EvData=#evh{handler=Handler},
+handle_connect(Object, EvData=#evh{handler=Handler},
From, State0 = #state{users=Users}) ->
%% Correct process is already listening just register it
put(Handler, From),
- User0 = #user{events=Listeners0} = gb_trees:get(From, Users),
- User = User0#user{events=[{Object,EvData}|Listeners0]},
- State = State0#state{users=gb_trees:update(From, User, Users)},
- {reply, ok, State}.
+ case gb_trees:lookup(From, Users) of
+ {value, User0 = #user{events=Listeners0}} ->
+ User = User0#user{events=[{Object,EvData}|Listeners0]},
+ State = State0#state{users=gb_trees:update(From, User, Users)},
+ {reply, ok, State};
+ none -> %% We are closing up the shop
+ {reply, {error, terminating}, State0}
+ end.
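%% Illustrative note (not part of the patch): gb_trees:lookup/2 returns
%% {value,User} or none, so a connect request that arrives while the server
%% is shutting down now gets {error,terminating} instead of crashing the
%% server with a badmatch from gb_trees:get/2.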
invoke_cb({{Ev=#wx{}, Ref=#wx_ref{}}, FunId,_}, _S) ->
%% Event callbacks
diff --git a/lib/wx/test/wx_event_SUITE.erl b/lib/wx/test/wx_event_SUITE.erl
index 2c6c59bb55..3252547c9b 100644
--- a/lib/wx/test/wx_event_SUITE.erl
+++ b/lib/wx/test/wx_event_SUITE.erl
@@ -336,12 +336,14 @@ connect_in_callback(Config) ->
end}]),
wxWindow:show(F1),
receive
- {continue, F1} -> Tester ! {continue, F1}
+ {continue, F1} ->
+ true = wxFrame:disconnect(F1, size),
+ Tester ! {continue, F1}
end
end,
- wxFrame:connect(Frame,size,
+ wxFrame:connect(Frame,show,
[{callback,
- fun(#wx{event=#wxSize{}},_SizeEv) ->
+ fun(#wx{event=#wxShow{}},_SizeEv) ->
io:format("Frame got size~n",[]),
spawn(TestWindow)
end}]),